var/home/core/zuul-output/logs/kubelet.log
Dec 03 16:31:15 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 03 16:31:15 crc restorecon[4761]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:31:15 crc restorecon[4761]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc 
restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 16:31:15 crc 
restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:15 crc restorecon[4761]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:15 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:31:16 crc 
restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:31:16 crc restorecon[4761]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 16:31:16 crc kubenswrapper[5002]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 16:31:16 crc kubenswrapper[5002]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 16:31:16 crc kubenswrapper[5002]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 16:31:16 crc kubenswrapper[5002]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 16:31:16 crc kubenswrapper[5002]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 03 16:31:16 crc kubenswrapper[5002]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.682000 5002 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686849 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686869 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686875 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686879 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686884 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686888 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686893 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686897 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686903 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686908 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686911 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686915 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686919 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686923 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686926 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686930 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686934 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686938 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686941 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686945 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686949 5002 feature_gate.go:330] unrecognized feature gate: 
ClusterMonitoringConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686953 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686957 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686961 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686965 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686969 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686974 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686978 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686982 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686985 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686989 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686993 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.686999 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687011 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687016 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687020 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687025 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687030 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687035 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687039 5002 feature_gate.go:330] unrecognized feature gate: Example Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687043 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687049 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687053 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687057 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687061 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687065 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687068 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687073 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687077 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687081 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687084 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687088 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687092 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687096 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687102 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
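[annotation] The long runs of feature_gate.go:330 warnings above and below are the upstream gate parser rejecting names it does not know: PinnedImages, GatewayAPI, NewOLM and the rest are OpenShift cluster-level gates, presumably forwarded to the kubelet through the featureGates map of the same config file, and they are logged once and ignored. The handful the kubelet does recognize are applied with GA (feature_gate.go:353) or deprecated (feature_gate.go:351) notices. Under that assumption, a sketch of the stanza that would yield the four gates the later "feature gates: {map[...]}" summary shows explicitly enabled:

    # Sketch (assumed source of these warnings): featureGates in the kubelet
    # config; unknown OpenShift-only names are warned about and dropped,
    # known ones land in the effective gate map.
    featureGates:
      CloudDualStackNodeIPs: true
      DisableKubeletCloudCredentialProviders: true
      KMSv1: true
      ValidatingAdmissionPolicy: true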
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687109 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687112 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687119 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687138 5002 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687142 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687146 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687151 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687155 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687159 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687164 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687169 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687174 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687180 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687185 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687190 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.687195 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687308 5002 flags.go:64] FLAG: --address="0.0.0.0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687323 5002 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687334 5002 flags.go:64] FLAG: --anonymous-auth="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687343 5002 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687356 5002 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687361 5002 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687370 5002 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687377 5002 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687383 5002 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687388 5002 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 
16:31:16.687394 5002 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687399 5002 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687405 5002 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687410 5002 flags.go:64] FLAG: --cgroup-root="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687415 5002 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687420 5002 flags.go:64] FLAG: --client-ca-file="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687426 5002 flags.go:64] FLAG: --cloud-config="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687431 5002 flags.go:64] FLAG: --cloud-provider="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687436 5002 flags.go:64] FLAG: --cluster-dns="[]" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687445 5002 flags.go:64] FLAG: --cluster-domain="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687450 5002 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687455 5002 flags.go:64] FLAG: --config-dir="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687461 5002 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687467 5002 flags.go:64] FLAG: --container-log-max-files="5" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687474 5002 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687479 5002 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687484 5002 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687489 5002 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687494 5002 flags.go:64] FLAG: --contention-profiling="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687500 5002 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687506 5002 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687512 5002 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687516 5002 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687526 5002 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687532 5002 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687537 5002 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687542 5002 flags.go:64] FLAG: --enable-load-reader="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687547 5002 flags.go:64] FLAG: --enable-server="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687552 5002 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687559 5002 flags.go:64] FLAG: --event-burst="100" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687564 
5002 flags.go:64] FLAG: --event-qps="50" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687570 5002 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687576 5002 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687581 5002 flags.go:64] FLAG: --eviction-hard="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687588 5002 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687594 5002 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687599 5002 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687605 5002 flags.go:64] FLAG: --eviction-soft="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687611 5002 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687617 5002 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687623 5002 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687629 5002 flags.go:64] FLAG: --experimental-mounter-path="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687635 5002 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687641 5002 flags.go:64] FLAG: --fail-swap-on="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687646 5002 flags.go:64] FLAG: --feature-gates="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687654 5002 flags.go:64] FLAG: --file-check-frequency="20s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687659 5002 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687665 5002 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687670 5002 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687675 5002 flags.go:64] FLAG: --healthz-port="10248" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687681 5002 flags.go:64] FLAG: --help="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687686 5002 flags.go:64] FLAG: --hostname-override="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687691 5002 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687696 5002 flags.go:64] FLAG: --http-check-frequency="20s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687701 5002 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687708 5002 flags.go:64] FLAG: --image-credential-provider-config="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687712 5002 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687718 5002 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687723 5002 flags.go:64] FLAG: --image-service-endpoint="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687729 5002 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687734 5002 flags.go:64] FLAG: 
--kube-api-burst="100" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687739 5002 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687768 5002 flags.go:64] FLAG: --kube-api-qps="50" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687773 5002 flags.go:64] FLAG: --kube-reserved="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687779 5002 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687784 5002 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687789 5002 flags.go:64] FLAG: --kubelet-cgroups="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687794 5002 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687799 5002 flags.go:64] FLAG: --lock-file="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687804 5002 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687811 5002 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687817 5002 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687826 5002 flags.go:64] FLAG: --log-json-split-stream="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687832 5002 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687837 5002 flags.go:64] FLAG: --log-text-split-stream="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687841 5002 flags.go:64] FLAG: --logging-format="text" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687846 5002 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687851 5002 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687855 5002 flags.go:64] FLAG: --manifest-url="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687859 5002 flags.go:64] FLAG: --manifest-url-header="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687865 5002 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687869 5002 flags.go:64] FLAG: --max-open-files="1000000" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687875 5002 flags.go:64] FLAG: --max-pods="110" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687879 5002 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687884 5002 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687888 5002 flags.go:64] FLAG: --memory-manager-policy="None" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687892 5002 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687898 5002 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687902 5002 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687906 5002 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 03 
16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687918 5002 flags.go:64] FLAG: --node-status-max-images="50" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687922 5002 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687926 5002 flags.go:64] FLAG: --oom-score-adj="-999" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687931 5002 flags.go:64] FLAG: --pod-cidr="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687935 5002 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687944 5002 flags.go:64] FLAG: --pod-manifest-path="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687948 5002 flags.go:64] FLAG: --pod-max-pids="-1" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687952 5002 flags.go:64] FLAG: --pods-per-core="0" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687956 5002 flags.go:64] FLAG: --port="10250" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687961 5002 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687965 5002 flags.go:64] FLAG: --provider-id="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687970 5002 flags.go:64] FLAG: --qos-reserved="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687979 5002 flags.go:64] FLAG: --read-only-port="10255" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687984 5002 flags.go:64] FLAG: --register-node="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687988 5002 flags.go:64] FLAG: --register-schedulable="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.687992 5002 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688000 5002 flags.go:64] FLAG: --registry-burst="10" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688019 5002 flags.go:64] FLAG: --registry-qps="5" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688024 5002 flags.go:64] FLAG: --reserved-cpus="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688029 5002 flags.go:64] FLAG: --reserved-memory="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688035 5002 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688039 5002 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688044 5002 flags.go:64] FLAG: --rotate-certificates="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688048 5002 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688052 5002 flags.go:64] FLAG: --runonce="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688057 5002 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688061 5002 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688065 5002 flags.go:64] FLAG: --seccomp-default="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688069 5002 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688074 5002 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" 
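[annotation] The flags.go:64 dump above (it continues below through the --storage-driver-* and --volume-* switches) records only the command line; the values that actually win after merging in /etc/kubernetes/kubelet.conf can be read back from the kubelet's configz endpoint. A sketch, assuming credentials with nodes/proxy access and the node name crc from this log:

    # Sketch: read the kubelet's effective merged configuration.
    kubectl get --raw "/api/v1/nodes/crc/proxy/configz" | python3 -m json.tool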
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688079 5002 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688084 5002 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688089 5002 flags.go:64] FLAG: --storage-driver-password="root" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688093 5002 flags.go:64] FLAG: --storage-driver-secure="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688097 5002 flags.go:64] FLAG: --storage-driver-table="stats" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688102 5002 flags.go:64] FLAG: --storage-driver-user="root" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688107 5002 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688112 5002 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688117 5002 flags.go:64] FLAG: --system-cgroups="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688122 5002 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688130 5002 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688135 5002 flags.go:64] FLAG: --tls-cert-file="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688139 5002 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688145 5002 flags.go:64] FLAG: --tls-min-version="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688151 5002 flags.go:64] FLAG: --tls-private-key-file="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688156 5002 flags.go:64] FLAG: --topology-manager-policy="none" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688160 5002 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688164 5002 flags.go:64] FLAG: --topology-manager-scope="container" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688169 5002 flags.go:64] FLAG: --v="2" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688176 5002 flags.go:64] FLAG: --version="false" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688182 5002 flags.go:64] FLAG: --vmodule="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688188 5002 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688193 5002 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688312 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688318 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688322 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688327 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688331 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688335 5002 feature_gate.go:330] unrecognized feature gate: 
IngressControllerLBSubnetsAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688339 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688343 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688347 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688351 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688356 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688361 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688366 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688370 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688373 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688377 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688380 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688384 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688387 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688391 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688395 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688398 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688402 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688406 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688409 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688413 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688416 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688420 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688423 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688427 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688431 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688434 5002 
feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688438 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688442 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688445 5002 feature_gate.go:330] unrecognized feature gate: Example Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688449 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688454 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688457 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688461 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688465 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688468 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688475 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688478 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688482 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688487 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688491 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688495 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688499 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688503 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688507 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688513 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688516 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688520 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688524 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688527 5002 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688531 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688534 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688538 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688543 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688548 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688552 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688556 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688561 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688568 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688576 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688581 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688586 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688591 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688596 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688600 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.688605 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.688621 5002 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 
16:31:16.698302 5002 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.698352 5002 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698453 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698468 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698473 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698479 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698488 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698494 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698498 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698503 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698507 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698512 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698517 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698522 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698526 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698530 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698534 5002 feature_gate.go:330] unrecognized feature gate: Example Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698538 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698541 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698545 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698549 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698552 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698556 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698560 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698563 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 16:31:16 crc kubenswrapper[5002]: 
W1203 16:31:16.698567 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698571 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698575 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698579 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698583 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698587 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698593 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698598 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698604 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698609 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698614 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698620 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698625 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698629 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698634 5002 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698638 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698642 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698646 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698650 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698653 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698657 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698662 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698665 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698670 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698675 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698680 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698683 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698687 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698691 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698695 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698700 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698704 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698709 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698715 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698720 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698723 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698727 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698732 5002 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698736 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698743 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698762 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698767 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698773 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698777 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698782 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698786 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698790 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698794 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.698802 5002 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true 
MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698964 5002 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698971 5002 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698975 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698979 5002 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698985 5002 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698991 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698995 5002 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.698999 5002 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699003 5002 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699007 5002 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699010 5002 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699014 5002 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699017 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699021 5002 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699025 5002 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699028 5002 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699033 5002 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699038 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699041 5002 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699046 5002 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699049 5002 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699053 5002 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699057 5002 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 
16:31:16.699061 5002 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699065 5002 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699070 5002 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699076 5002 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699081 5002 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699085 5002 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699091 5002 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699096 5002 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699100 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699105 5002 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699110 5002 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699113 5002 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699117 5002 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699121 5002 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699125 5002 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699129 5002 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699132 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699136 5002 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699140 5002 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699144 5002 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699148 5002 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699152 5002 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699156 5002 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699160 5002 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699163 5002 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699167 5002 feature_gate.go:330] unrecognized 
feature gate: NetworkDiagnosticsConfig
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699172 5002 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699176 5002 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699179 5002 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699184 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699188 5002 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699194 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699227 5002 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699231 5002 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699235 5002 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699239 5002 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699243 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699247 5002 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699251 5002 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699255 5002 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699259 5002 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699264 5002 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699270 5002 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699274 5002 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699278 5002 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699283 5002 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699289 5002 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.699293 5002 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.699302 5002 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.699535 5002 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.702198 5002 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.702292 5002 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.702901 5002 server.go:997] "Starting client certificate rotation"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.702932 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.703388 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-29 21:34:17.857539164 +0000 UTC
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.703525 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.709456 5002 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.710832 5002 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.711044 5002 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.720885 5002 log.go:25] "Validated CRI v1 runtime API"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.738296 5002 log.go:25] "Validated CRI v1 image API"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.739947 5002 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.742333 5002 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-16-26-40-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.742365 5002 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.756669 5002 manager.go:217] Machine: {Timestamp:2025-12-03 16:31:16.75511051 +0000 UTC m=+0.168932418 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:c926b5a3-23cd-42f4-be44-84fd294ba72b BootID:1f887d77-b9a6-4290-9d59-445b9644ebf8 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:0e:d6:53 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:0e:d6:53 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:c0:51:17 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:15:fd:3d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e3:4a:6e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ae:d6:92 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:15:24:81 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:42:30:c4:c4:f8:c4 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:46:7a:a2:64:b0:e6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.757003 5002 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.757388 5002 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.758564 5002 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.758988 5002 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.759050 5002 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.759414 5002 topology_manager.go:138] "Creating topology manager with none policy"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.759434 5002 container_manager_linux.go:303] "Creating device plugin manager"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.759835 5002 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.759882 5002 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.760388 5002 state_mem.go:36] "Initialized new in-memory state store"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.760549 5002 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.761654 5002 kubelet.go:418] "Attempting to sync node with API server"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.761688 5002 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.761729 5002 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.761778 5002 kubelet.go:324] "Adding apiserver pod source"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.761798 5002 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.764386 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.764528 5002 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.764559 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.764608 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.764888 5002 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.764877 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.765600 5002 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766200 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766221 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766228 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766236 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766249 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766260 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766269 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766282 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766290 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766298 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766310 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766317 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.766465 5002 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.767058 5002 server.go:1280] "Started kubelet"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.767418 5002 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.767669 5002 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.767743 5002 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.768882 5002 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.769145 5002 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.155:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187dc1957b98ee5b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:31:16.767010395 +0000 UTC m=+0.180832283,LastTimestamp:2025-12-03 16:31:16.767010395 +0000 UTC m=+0.180832283,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 16:31:16 crc systemd[1]: Started Kubernetes Kubelet.
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.772813 5002 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.772911 5002 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.773669 5002 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 14:46:04.084610202 +0000 UTC
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.774122 5002 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 22h14m47.310495354s for next certificate rotation
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.773770 5002 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.773731 5002 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.774176 5002 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.773816 5002 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.774423 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="200ms"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.774715 5002 factory.go:55] Registering systemd factory
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.774733 5002 factory.go:221] Registration of the systemd container factory successfully
Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.775318 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused
Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.775392 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.775794 5002 factory.go:153] Registering CRI-O factory
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.775854 5002 factory.go:221] Registration of the crio container factory successfully
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.776163 5002 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.776197 5002 factory.go:103] Registering Raw factory
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.776228 5002 manager.go:1196] Started watching for new ooms in manager
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.777341 5002 manager.go:319] Starting recovery of all containers
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.778153 5002 server.go:460] "Adding debug handlers to kubelet server"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.793967 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794045 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794067 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794085 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794104 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794121 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794138 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794154 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794176 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794193 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794209 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794226 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794242 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794262 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794279 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794295 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794313 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794331 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794345 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794359 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794385 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794401 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794417 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794432 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794449 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794465 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794485 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794502 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794518 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794533 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794549 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794567 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794585 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794641 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794658 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794673 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794689 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794704 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794718 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794737 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794828 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794845 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794860 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794878 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794894 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794910 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794928 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794944 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794962 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794978 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.794994 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795011 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795033 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795053 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795070 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795087 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795128 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795148 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795167 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795183 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795199 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795216 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795241 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795257 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795275 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795563 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795578 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795593 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795610 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795625 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795640 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795654 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795670 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795685 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795706 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795725 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795765 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795785 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795800 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795852 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795868 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795883 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795898 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795913 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795928 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795941 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795955 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795970 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.795985 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796001 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796664 5002 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796699 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796718 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796736 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796780 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796800 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796818 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796836 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796851 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796869 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796887 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796906 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796922 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796937 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796952 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796973 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.796993 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797012 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797030 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797048 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797066 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797086 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797104 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797121 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797137 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797157 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797172 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797187 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797202 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797238 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797256 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797277 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797293 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797312 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797329 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797347 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797363 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797378 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797395 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797411 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797427 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797443 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797459 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797474 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797491 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797507 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797524 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797542 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797559 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797574 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797590 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b"
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797607 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797623 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797637 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797669 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797685 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797703 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797720 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797738 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797782 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797797 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797813 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797828 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797846 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797861 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797878 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797893 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797909 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797924 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797939 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797956 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797974 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.797992 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798010 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798025 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798040 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798056 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798070 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798085 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798103 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798118 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798133 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798148 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798163 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798177 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798195 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798220 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798236 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798250 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798265 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798280 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798295 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798310 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798324 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798339 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798354 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798369 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798383 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798398 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798411 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798425 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798439 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798455 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798469 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798486 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798500 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798516 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798533 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798548 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798563 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798578 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798592 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798607 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798621 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798635 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798648 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798661 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798675 5002 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798691 5002 reconstruct.go:97] "Volume reconstruction finished" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.798701 5002 reconciler.go:26] "Reconciler: start to sync state" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.805638 5002 manager.go:324] Recovery completed Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.814176 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.816823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.816897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.816918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.818489 5002 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.818523 5002 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.818669 5002 state_mem.go:36] "Initialized new in-memory state store" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.830193 5002 policy_none.go:49] "None policy: Start" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.831705 5002 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.831771 5002 state_mem.go:35] "Initializing new in-memory state store" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.836194 5002 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.839000 5002 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.839052 5002 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.839083 5002 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.839133 5002 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 16:31:16 crc kubenswrapper[5002]: W1203 16:31:16.840407 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.840526 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError" Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.874791 5002 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.897627 5002 manager.go:334] "Starting Device Plugin manager" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.897693 5002 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.897706 5002 server.go:79] "Starting device plugin registration server" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.898321 5002 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.898339 5002 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.898517 5002 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.898632 5002 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.898648 5002 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.906304 5002 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.940038 5002 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.940119 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941180 5002 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941305 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941599 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941652 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.941983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942004 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942109 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942262 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942309 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942773 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942854 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942876 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.942886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943120 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943155 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943474 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943497 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943640 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943740 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.943771 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944199 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944204 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944240 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944953 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.944997 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.945010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.945300 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.945344 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.946315 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.946343 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.946353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:16 crc kubenswrapper[5002]: E1203 16:31:16.975221 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="400ms" Dec 03 16:31:16 crc kubenswrapper[5002]: I1203 16:31:16.998888 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.000379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.000432 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.000445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.000476 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001165 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001212 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.001202 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.155:6443: connect: connection refused" node="crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001246 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001276 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001306 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001385 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001452 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001503 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001547 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001597 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001639 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001685 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001734 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001832 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.001915 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103646 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103704 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103729 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103761 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103784 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103807 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103825 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103853 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103875 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103911 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103931 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103954 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103951 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103978 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104009 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104002 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104055 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.103918 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104086 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104109 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104087 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104015 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104134 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104011 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104115 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104174 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104202 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104212 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.104080 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.126003 5002 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.155:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187dc1957b98ee5b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:31:16.767010395 +0000 UTC m=+0.180832283,LastTimestamp:2025-12-03 16:31:16.767010395 +0000 UTC m=+0.180832283,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.201383 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.203184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.203244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.203281 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.203322 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.203893 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.155:6443: connect: connection refused" node="crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.265122 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.285331 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.300461 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-518cb37aee9b36ea4f4090f7cd22846393640dabde41303cc8f914b25cfde9f4 WatchSource:0}: Error finding container 518cb37aee9b36ea4f4090f7cd22846393640dabde41303cc8f914b25cfde9f4: Status 404 returned error can't find the container with id 518cb37aee9b36ea4f4090f7cd22846393640dabde41303cc8f914b25cfde9f4 Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.307885 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-0dda59d6e0527043208c4d74336e4bd80951f53a867d1c4903e2111183d25f10 WatchSource:0}: Error finding container 0dda59d6e0527043208c4d74336e4bd80951f53a867d1c4903e2111183d25f10: Status 404 returned error can't find the container with id 0dda59d6e0527043208c4d74336e4bd80951f53a867d1c4903e2111183d25f10 Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.308519 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.323015 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.331049 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.333194 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-876d2bff87d74cc26a8489d971f5b0e2cdaa524959b20b3da12bb997caa8cad1 WatchSource:0}: Error finding container 876d2bff87d74cc26a8489d971f5b0e2cdaa524959b20b3da12bb997caa8cad1: Status 404 returned error can't find the container with id 876d2bff87d74cc26a8489d971f5b0e2cdaa524959b20b3da12bb997caa8cad1 Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.353891 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-0e218b7b3f51b1090e9983fcbc7d6ccaef61ffeec36a78cb037a3eec1d2a70d0 WatchSource:0}: Error finding container 0e218b7b3f51b1090e9983fcbc7d6ccaef61ffeec36a78cb037a3eec1d2a70d0: Status 404 returned error can't find the container with id 0e218b7b3f51b1090e9983fcbc7d6ccaef61ffeec36a78cb037a3eec1d2a70d0 Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.360941 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c3f9256bb9335a9c84d1b7a82f870eb517c66b50b2c6864f9b87c1cc9c872f49 WatchSource:0}: Error finding container c3f9256bb9335a9c84d1b7a82f870eb517c66b50b2c6864f9b87c1cc9c872f49: Status 404 returned error can't find the container with id c3f9256bb9335a9c84d1b7a82f870eb517c66b50b2c6864f9b87c1cc9c872f49 Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.376330 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="800ms" Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.569177 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.569832 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError" Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.580071 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.580122 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.604672 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.605894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.605929 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.605938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.605962 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.606405 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.155:6443: connect: connection refused" node="crc" Dec 03 16:31:17 crc kubenswrapper[5002]: W1203 16:31:17.699235 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused Dec 03 16:31:17 crc kubenswrapper[5002]: E1203 16:31:17.699343 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.769381 5002 csi_plugin.go:884] Failed to contact 
API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.844654 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.844868 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c3f9256bb9335a9c84d1b7a82f870eb517c66b50b2c6864f9b87c1cc9c872f49"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.846813 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df" exitCode=0 Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.846906 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.846935 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0e218b7b3f51b1090e9983fcbc7d6ccaef61ffeec36a78cb037a3eec1d2a70d0"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.847082 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848670 5002 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7b0d9d9c552e6a449339700aa894b02553c976b10baf22247fe9374ca96a5922" exitCode=0 Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848725 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7b0d9d9c552e6a449339700aa894b02553c976b10baf22247fe9374ca96a5922"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848766 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"876d2bff87d74cc26a8489d971f5b0e2cdaa524959b20b3da12bb997caa8cad1"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.848856 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.850658 5002 kubelet_node_status.go:401] "Setting node annotation 
to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.851197 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.851223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.851232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.851453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.851514 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.851540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.852270 5002 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2" exitCode=0 Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.852283 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.852313 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0dda59d6e0527043208c4d74336e4bd80951f53a867d1c4903e2111183d25f10"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.852374 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.853037 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.853062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.853072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854101 5002 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e" exitCode=0 Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854125 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854139 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"518cb37aee9b36ea4f4090f7cd22846393640dabde41303cc8f914b25cfde9f4"} Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854190 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:17 crc kubenswrapper[5002]: I1203 16:31:17.854896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:18 crc kubenswrapper[5002]: E1203 16:31:18.178957 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="1.6s" Dec 03 16:31:18 crc kubenswrapper[5002]: W1203 16:31:18.271526 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.155:6443: connect: connection refused Dec 03 16:31:18 crc kubenswrapper[5002]: E1203 16:31:18.271631 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.155:6443: connect: connection refused" logger="UnhandledError" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.407126 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.409957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.410010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.410023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.410063 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.771056 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.860646 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.860718 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99"} Dec 03 16:31:18 crc 
kubenswrapper[5002]: I1203 16:31:18.860735 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.860769 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.862903 5002 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="857fadde15127c9c8e932d6510964b56dc507e6b54d4a5baa85ccd9baf19890d" exitCode=0 Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.862971 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"857fadde15127c9c8e932d6510964b56dc507e6b54d4a5baa85ccd9baf19890d"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.863166 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.866939 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.866991 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.867007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.870623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.870733 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.872286 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.872335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.872349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.875729 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.875806 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 
16:31:18.875821 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.875969 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.876711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.876779 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.876791 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.881059 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.881087 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.881117 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f"} Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.881561 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.883365 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.883419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:18 crc kubenswrapper[5002]: I1203 16:31:18.883433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.373519 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.888224 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1"} Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.888378 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.889540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 
16:31:19.889566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.889574 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.892630 5002 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="19806e31b5403348e56008ca0902ca3d6981cb6dbab109abd904f2c2ff818910" exitCode=0 Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.892711 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.892741 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"19806e31b5403348e56008ca0902ca3d6981cb6dbab109abd904f2c2ff818910"} Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.892934 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.893100 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.893235 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.893425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.893452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.893461 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.894654 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.894672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.894681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.895236 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.895303 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:19 crc kubenswrapper[5002]: I1203 16:31:19.895329 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1c9058167cd59ece5b08916606b609b7cd2314d4025c5ee345841f204a0fe416"} Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901659 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bd3ebeff2a55c61214875707374f77a32c72f1848105c4e344003ae0ded615db"} Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901675 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6bfe38d76dc3e876cff4b8588aa5e6adada158718504596512cc61001ded6f2e"} Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901777 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901787 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901825 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.901775 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903213 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903430 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903446 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:20 crc kubenswrapper[5002]: I1203 16:31:20.903652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.908182 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"62450cc49c9f2ee66c14c1a8450fc0465d94fde6713e77090b7d2b5fe935f76f"} Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.908241 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7de3be329b4194b9382ba166fa438fa32fc557ef1cd87f79b49f7eeb4ff915b4"} Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.908297 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.908305 5002 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.909477 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.909502 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.909516 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.909628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.909681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.909698 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:21 crc kubenswrapper[5002]: I1203 16:31:21.930541 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.374011 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.374125 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.778809 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.911128 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.914185 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.915160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.915217 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.915311 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.916915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.916990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:22 crc kubenswrapper[5002]: I1203 16:31:22.917019 5002 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:23 crc kubenswrapper[5002]: I1203 16:31:23.913180 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:23 crc kubenswrapper[5002]: I1203 16:31:23.915102 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:23 crc kubenswrapper[5002]: I1203 16:31:23.915143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:23 crc kubenswrapper[5002]: I1203 16:31:23.915152 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.290201 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.290429 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.292073 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.292132 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.292145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.909043 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.915255 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.916353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.916394 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:24 crc kubenswrapper[5002]: I1203 16:31:24.916424 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.615954 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.616200 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.617793 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.617844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.617856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.620860 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:26 crc kubenswrapper[5002]: E1203 16:31:26.906531 5002 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.920813 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.921809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.921875 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:26 crc kubenswrapper[5002]: I1203 16:31:26.921897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:27 crc kubenswrapper[5002]: I1203 16:31:27.055544 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:27 crc kubenswrapper[5002]: I1203 16:31:27.059372 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:27 crc kubenswrapper[5002]: I1203 16:31:27.923486 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:27 crc kubenswrapper[5002]: I1203 16:31:27.924597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:27 crc kubenswrapper[5002]: I1203 16:31:27.924640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:27 crc kubenswrapper[5002]: I1203 16:31:27.924650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:28 crc kubenswrapper[5002]: I1203 16:31:28.349878 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:28 crc kubenswrapper[5002]: E1203 16:31:28.411352 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 03 16:31:28 crc kubenswrapper[5002]: I1203 16:31:28.770013 5002 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 03 16:31:28 crc kubenswrapper[5002]: E1203 16:31:28.773466 5002 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 16:31:28 crc kubenswrapper[5002]: I1203 16:31:28.926764 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:28 crc kubenswrapper[5002]: I1203 16:31:28.928370 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:28 crc 
kubenswrapper[5002]: I1203 16:31:28.928426 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:28 crc kubenswrapper[5002]: I1203 16:31:28.928445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:29 crc kubenswrapper[5002]: W1203 16:31:29.258907 5002 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 16:31:29 crc kubenswrapper[5002]: I1203 16:31:29.259477 5002 trace.go:236] Trace[1694878080]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:31:19.257) (total time: 10001ms): Dec 03 16:31:29 crc kubenswrapper[5002]: Trace[1694878080]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (16:31:29.258) Dec 03 16:31:29 crc kubenswrapper[5002]: Trace[1694878080]: [10.001494033s] [10.001494033s] END Dec 03 16:31:29 crc kubenswrapper[5002]: E1203 16:31:29.259918 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 16:31:29 crc kubenswrapper[5002]: E1203 16:31:29.780808 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 03 16:31:29 crc kubenswrapper[5002]: I1203 16:31:29.929427 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:29 crc kubenswrapper[5002]: I1203 16:31:29.930772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:29 crc kubenswrapper[5002]: I1203 16:31:29.930909 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:29 crc kubenswrapper[5002]: I1203 16:31:29.930974 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.012411 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.014070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.014117 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.014131 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.014165 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:31:30 crc kubenswrapper[5002]: W1203 16:31:30.019161 5002 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.019301 5002 trace.go:236] Trace[202502209]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:31:20.017) (total time: 10002ms): Dec 03 16:31:30 crc kubenswrapper[5002]: Trace[202502209]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (16:31:30.019) Dec 03 16:31:30 crc kubenswrapper[5002]: Trace[202502209]: [10.002220364s] [10.002220364s] END Dec 03 16:31:30 crc kubenswrapper[5002]: E1203 16:31:30.019331 5002 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.179380 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.179461 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.184755 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 16:31:30 crc kubenswrapper[5002]: I1203 16:31:30.184831 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 16:31:31 crc kubenswrapper[5002]: I1203 16:31:31.936492 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:31 crc kubenswrapper[5002]: I1203 16:31:31.936688 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:31 crc kubenswrapper[5002]: I1203 16:31:31.938563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:31 crc kubenswrapper[5002]: I1203 16:31:31.938632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:31 crc kubenswrapper[5002]: I1203 16:31:31.938649 5002 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:31 crc kubenswrapper[5002]: I1203 16:31:31.940620 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.374924 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.375034 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.926507 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.936939 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.938621 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.938684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.938717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:32 crc kubenswrapper[5002]: I1203 16:31:32.943764 5002 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 03 16:31:33 crc kubenswrapper[5002]: I1203 16:31:33.666170 5002 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.724199 5002 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.932057 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.932228 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.933433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.933477 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.933491 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.943897 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 
16:31:34.944003 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.944684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.944713 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:34 crc kubenswrapper[5002]: I1203 16:31:34.944723 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.174914 5002 trace.go:236] Trace[1462875939]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:31:20.206) (total time: 14968ms): Dec 03 16:31:35 crc kubenswrapper[5002]: Trace[1462875939]: ---"Objects listed" error: 14968ms (16:31:35.174) Dec 03 16:31:35 crc kubenswrapper[5002]: Trace[1462875939]: [14.968724868s] [14.968724868s] END Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.174942 5002 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.176793 5002 trace.go:236] Trace[1035022005]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:31:20.589) (total time: 14587ms): Dec 03 16:31:35 crc kubenswrapper[5002]: Trace[1035022005]: ---"Objects listed" error: 14587ms (16:31:35.176) Dec 03 16:31:35 crc kubenswrapper[5002]: Trace[1035022005]: [14.587211735s] [14.587211735s] END Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.176821 5002 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.176828 5002 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.178926 5002 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.325762 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49012->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.325763 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49028->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.325833 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49012->192.168.126.11:17697: read: connection reset by peer" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.325859 5002 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:49028->192.168.126.11:17697: read: connection reset by peer" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.326151 5002 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.326172 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.773119 5002 apiserver.go:52] "Watching apiserver" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.776107 5002 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.776599 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.777101 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.777176 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.777236 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.777355 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.777414 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.777475 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.777771 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.777795 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.777870 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.779054 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.779556 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.779587 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.779718 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.779961 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.780333 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.780410 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.780612 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.782271 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.802415 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.818427 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.832175 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.848901 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.860178 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.870875 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.875009 5002 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880456 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880545 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880568 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880613 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880640 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880666 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880689 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880713 
5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880736 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880781 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880806 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880826 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880873 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880897 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.880929 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.881349 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882437 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882471 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882513 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882559 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882586 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882628 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882672 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882724 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882793 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882824 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882886 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882929 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882936 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.882969 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883133 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883139 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883177 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883151 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883290 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883335 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883367 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883394 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883424 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883434 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883450 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883471 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883500 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883529 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883555 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883580 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883621 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883658 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883683 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883712 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883738 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883776 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883801 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883819 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883829 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883855 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883860 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883880 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883955 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.883984 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884005 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884032 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884058 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884083 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884083 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884106 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884229 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884251 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884283 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884337 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884361 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884388 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884421 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884450 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884477 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884466 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884501 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884530 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884555 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884575 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884628 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884652 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884674 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 
16:31:35.884681 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884771 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884886 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884921 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884925 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.884988 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885027 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885056 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885082 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885111 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885133 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885140 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885205 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885236 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885272 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885314 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885348 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885385 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885367 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885421 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885462 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885492 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885527 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885565 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885595 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885609 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885632 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885671 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885706 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.885729 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886086 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886117 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886168 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886196 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886918 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886960 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886997 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887031 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887065 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887102 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887140 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887173 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887215 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887255 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887289 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887326 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887366 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887401 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887435 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887472 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887509 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887544 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887578 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887612 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887640 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887672 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887707 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887762 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887797 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887829 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887862 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887892 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887924 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887958 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887994 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888028 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888055 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888080 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888100 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888132 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888161 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888190 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888217 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888245 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888276 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888304 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888362 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888396 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888428 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888466 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888502 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888531 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888562 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888594 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888627 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888657 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888688 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888722 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888769 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889908 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906320 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906519 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906649 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906817 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906863 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906938 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886037 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886243 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886563 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886629 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.886882 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887120 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.908684 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:36.408648818 +0000 UTC m=+19.822470716 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887295 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.887450 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888060 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888677 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888727 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888853 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.888959 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889023 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.907813 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889111 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889408 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889506 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889509 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889541 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889679 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.889867 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.890131 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.890140 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.890192 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.890903 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.891332 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.891383 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.892122 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.892368 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.892623 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.893219 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.893272 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.893735 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.894242 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906197 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906188 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.906428 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.907331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.907475 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.907771 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.907804 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.907815 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.908112 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.908280 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.908402 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.908561 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909462 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909494 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909526 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909764 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909888 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909935 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.909963 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910002 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910026 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910050 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910053 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910118 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910375 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910471 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910132 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910151 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.911258 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910177 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910525 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.910929 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.911312 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.911514 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.911979 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912722 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912189 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912336 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912351 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912391 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912441 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.913366 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.913868 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.913870 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.914076 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.914213 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.917523 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.917686 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.934060 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.934512 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.934779 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.934900 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.935176 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.935245 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.935620 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.935772 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.935990 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.935997 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.936407 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.936460 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.913766 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.936623 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.936666 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.936916 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.912811 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.936965 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937019 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937055 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937087 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937111 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937136 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937165 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937168 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937188 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937218 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937247 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937278 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937303 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937333 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937366 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937313 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937390 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937638 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937645 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937710 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937810 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937861 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937908 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937919 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937961 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.937971 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938010 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938033 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938224 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938314 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938343 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938479 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938647 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938693 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.938693 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955958 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955992 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956018 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957416 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957627 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957672 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957696 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957779 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957820 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957848 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957866 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957887 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957916 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958163 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958188 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958341 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958355 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958367 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958386 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958398 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958410 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958425 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958444 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958455 5002 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958467 5002 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958488 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958503 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958517 5002 
reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958530 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958544 5002 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958558 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958571 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958581 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958590 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958601 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958610 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958620 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958631 5002 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958646 5002 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958656 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958666 5002 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958676 5002 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958687 5002 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958697 5002 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958707 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958719 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958729 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958739 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958765 5002 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958779 5002 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958800 5002 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958813 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958826 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 
16:31:35.958843 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958852 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958860 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958869 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958879 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958889 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958899 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958908 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958917 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958925 5002 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958934 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958943 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958952 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958962 5002 reconciler_common.go:293] "Volume 
detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958971 5002 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958981 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958990 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958999 5002 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959008 5002 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959025 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959041 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959049 5002 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959058 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959067 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959080 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959089 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959098 5002 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" 
(UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959107 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959115 5002 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959124 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959133 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959142 5002 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959153 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959162 5002 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959173 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959183 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959556 5002 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959576 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959589 5002 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959601 5002 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959622 5002 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959638 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959651 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959665 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959678 5002 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959696 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959708 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959720 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959731 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959740 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959769 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959785 5002 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959802 5002 reconciler_common.go:293] "Volume detached for volume 
\"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959812 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959822 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959831 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959840 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959849 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959860 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959869 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959880 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959890 5002 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959902 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959912 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959922 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959932 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959942 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959953 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959963 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959972 5002 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959983 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959993 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960008 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960019 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960028 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960037 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960047 5002 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960057 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960066 5002 reconciler_common.go:293] "Volume detached for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960075 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960084 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960095 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960105 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960113 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960122 5002 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960136 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960154 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.939560 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.939769 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.939952 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.953929 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.954327 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.954339 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.954549 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.961238 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.954782 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955052 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955398 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955802 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955837 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955880 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955894 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955987 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.961564 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.961657 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:36.461634471 +0000 UTC m=+19.875456559 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.962538 5002 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.962700 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956269 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956378 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956671 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956678 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956696 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956770 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956940 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.956976 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.939546 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957012 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957111 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957227 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957258 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.954717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957473 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955364 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.955672 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957523 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.957543 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958164 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958180 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958221 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958388 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958368 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958457 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958489 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958537 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958651 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958820 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.958866 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959131 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959394 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959847 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959930 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.959956 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960736 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.960741 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.963259 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.963492 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:36.46346731 +0000 UTC m=+19.877289198 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.964296 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.964354 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.964403 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.965732 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.967483 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.968032 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.969159 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.978765 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.979606 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.980648 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.980786 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.980885 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.980943 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.981086 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.981103 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.981554 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:36.481035955 +0000 UTC m=+19.894857843 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:35 crc kubenswrapper[5002]: E1203 16:31:35.981720 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-03 16:31:36.481701593 +0000 UTC m=+19.895523591 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.982533 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1" exitCode=255 Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.982647 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1"} Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.983149 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.984372 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.985244 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.985529 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.985666 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.985546 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.986299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.987132 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.987863 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.989872 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.990897 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.991134 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.994575 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.994862 5002 scope.go:117] "RemoveContainer" containerID="836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1" Dec 03 16:31:35 crc kubenswrapper[5002]: I1203 16:31:35.998658 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.001450 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.005168 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.012042 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.026186 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.042525 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.058963 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061007 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061062 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061138 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061155 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061168 5002 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061181 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061194 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061209 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061222 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath 
\"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061234 5002 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061245 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061257 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061272 5002 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061284 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061296 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061308 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061320 5002 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061331 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061342 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061358 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061371 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061384 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 
16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061496 5002 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061530 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061535 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061554 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061622 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061632 5002 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061657 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061669 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061677 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061690 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061699 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061707 5002 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061716 5002 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061738 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061763 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061774 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061784 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061795 5002 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061823 5002 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061836 5002 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061845 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061854 5002 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061863 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061874 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061901 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node 
\"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061911 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061921 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061932 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061941 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061951 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061986 5002 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.061996 5002 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062008 5002 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062018 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062027 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062037 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062064 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062073 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" 
DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062082 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062091 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062109 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062118 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062141 5002 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062151 5002 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062161 5002 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062172 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062181 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062191 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062216 5002 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062225 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.062234 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 
16:31:36.069800 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.096426 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.105637 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.112534 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:31:36 crc kubenswrapper[5002]: W1203 16:31:36.131397 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-77365359f81ca913a425533661cbc4bbf77d375f0b280a4eb2f3467f8739fa07 WatchSource:0}: Error finding container 77365359f81ca913a425533661cbc4bbf77d375f0b280a4eb2f3467f8739fa07: Status 404 returned error can't find the container with id 77365359f81ca913a425533661cbc4bbf77d375f0b280a4eb2f3467f8739fa07 Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.465909 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.466022 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.466078 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.466244 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.466317 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:37.466298443 +0000 UTC m=+20.880120331 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.466550 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.466631 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:37.466602681 +0000 UTC m=+20.880424569 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.466675 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:37.466649862 +0000 UTC m=+20.880471750 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.573659 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.573821 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.573883 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.573920 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.573931 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.573995 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:37.573976416 +0000 UTC m=+20.987798304 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.573995 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.574031 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.574056 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.574151 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:37.5741042 +0000 UTC m=+20.987926118 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.840221 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:36 crc kubenswrapper[5002]: E1203 16:31:36.840395 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.844240 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.844917 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.846415 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.847132 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.848243 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.848849 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.849410 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.850399 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.851058 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.852046 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.852530 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.853515 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.854018 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.854494 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.855335 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.855842 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.856759 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.857131 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.857646 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.858535 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.858985 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.859899 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.860383 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.865061 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.865475 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.866496 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.867132 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.868107 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.868719 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.869543 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.869991 5002 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.870085 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.872041 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.872528 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.873097 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.874705 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.875669 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.876189 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.877358 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.878143 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.878960 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.879539 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.880532 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.881612 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.882136 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.883191 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.884180 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.884316 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.885322 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.885796 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.886222 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.887043 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.887530 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.888803 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.889915 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.900477 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.918079 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.939364 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03
T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.951157 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.967101 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.979354 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.988123 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.990109 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5"} Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.990505 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.991239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"77365359f81ca913a425533661cbc4bbf77d375f0b280a4eb2f3467f8739fa07"} Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.993235 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384"} Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.993286 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09"} Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.993298 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a1db125a48c7e1e2c775e248b19edcd2012c2a5d98af918cd7561fa459e03b29"} Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.994598 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a"} Dec 03 16:31:36 crc kubenswrapper[5002]: I1203 16:31:36.994848 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"708f5108c342cca45e50e769b098c6d9f2c79883777ea7085c7c89db9c6e5bf0"} Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.005157 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.015415 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.027225 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.040528 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.053910 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.069224 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.085164 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.100400 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.114218 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.141964 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.162239 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.177876 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.197829 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.213401 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.485611 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.485720 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.485806 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:39.485776115 +0000 UTC m=+22.899598013 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.485825 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.485837 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.485873 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:39.485862648 +0000 UTC m=+22.899684536 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.485980 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.486062 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:39.486040482 +0000 UTC m=+22.899862370 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.587073 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.587313 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587310 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587374 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587417 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587437 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587501 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:39.58747822 +0000 UTC m=+23.001300168 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587375 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587652 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.587706 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:39.587688236 +0000 UTC m=+23.001510124 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.839622 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.839827 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:37 crc kubenswrapper[5002]: I1203 16:31:37.839638 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:37 crc kubenswrapper[5002]: E1203 16:31:37.840259 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.379260 5002 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.381188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.381240 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.381253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.381364 5002 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.388733 5002 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.389022 5002 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.390100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.390134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.390145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.390162 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.390175 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.406975 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:38Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.410427 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.410464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.410474 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.410488 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.410500 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.420949 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:38Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.425174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.425211 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.425223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.425239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.425252 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.437087 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:38Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.441187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.441224 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.441235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.441251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.441264 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.467773 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:38Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.472139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.472175 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.472185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.472201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.472247 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.487668 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:38Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.487834 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.489729 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.489794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.489808 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.489840 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.489853 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.593108 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.593171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.593205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.593235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.593252 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.696926 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.696966 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.696979 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.696998 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.697009 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.799204 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.799264 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.799278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.799302 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.799317 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.839604 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:38 crc kubenswrapper[5002]: E1203 16:31:38.839830 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.901996 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.902029 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.902037 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.902050 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:38 crc kubenswrapper[5002]: I1203 16:31:38.902060 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:38Z","lastTransitionTime":"2025-12-03T16:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.004450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.004494 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.004505 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.004522 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.004537 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.107494 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.107544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.107555 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.107571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.107580 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.210451 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.210493 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.210504 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.210525 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.210536 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.313173 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.313230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.313244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.313262 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.313273 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.378404 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.383117 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.392116 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.397032 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.405726 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.415529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.415583 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.415594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.415612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.415623 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.420207 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.432591 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.445081 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.458604 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.483993 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.501957 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.502054 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.502093 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.502188 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:43.502164451 +0000 UTC m=+26.915986339 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.502281 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.502405 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:43.502377557 +0000 UTC m=+26.916199495 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.502289 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.502472 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:43.502460199 +0000 UTC m=+26.916282107 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.512230 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f
36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.521251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.521295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.521307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.521325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.521342 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.527841 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.540695 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.551554 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.563216 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.577622 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.594262 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.602579 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:39 crc 
kubenswrapper[5002]: I1203 16:31:39.602670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602813 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602845 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602860 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602860 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602882 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602897 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602926 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:43.60290145 +0000 UTC m=+27.016723488 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.602954 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:43.602938021 +0000 UTC m=+27.016759909 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.607555 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:39Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.624559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.624617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.624630 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.624649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.624661 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.728023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.728123 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.728139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.728161 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.728174 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.831450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.831507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.831518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.831536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.831549 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.839715 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.839775 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.839851 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:39 crc kubenswrapper[5002]: E1203 16:31:39.840066 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.934436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.934494 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.934504 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.934519 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:39 crc kubenswrapper[5002]: I1203 16:31:39.934529 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:39Z","lastTransitionTime":"2025-12-03T16:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: E1203 16:31:40.008362 5002 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.037525 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.037562 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.037571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.037585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.037594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.140362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.140419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.140431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.140448 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.140461 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.243356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.243402 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.243411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.243427 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.243440 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.345858 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.345920 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.345938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.345958 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.345970 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.449435 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.449501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.449518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.449541 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.449560 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.551950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.552003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.552035 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.552053 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.552065 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.654507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.654556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.654576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.654593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.654610 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.758023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.758079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.758091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.758115 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.758126 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.839931 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:40 crc kubenswrapper[5002]: E1203 16:31:40.840223 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.861546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.861614 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.861638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.861666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.861690 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.964340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.964653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.964662 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.964680 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:40 crc kubenswrapper[5002]: I1203 16:31:40.964690 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:40Z","lastTransitionTime":"2025-12-03T16:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.067619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.067663 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.067672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.067687 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.067700 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.170888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.170946 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.170956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.170980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.170994 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.273890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.273936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.273953 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.273978 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.273996 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.377831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.377884 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.377895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.377920 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.377934 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.480800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.481684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.481780 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.481811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.481837 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.584807 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.584863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.584876 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.584895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.584909 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.687195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.687257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.687272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.687296 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.687310 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.788980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.789014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.789023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.789036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.789044 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.839423 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:41 crc kubenswrapper[5002]: E1203 16:31:41.839553 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.839640 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:41 crc kubenswrapper[5002]: E1203 16:31:41.839679 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.872200 5002 csr.go:261] certificate signing request csr-zbctv is approved, waiting to be issued Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.890853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.890889 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.890900 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.890931 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.890941 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.917336 5002 csr.go:257] certificate signing request csr-zbctv is issued Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.918629 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6kd7v"] Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.919118 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.921939 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.922222 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.922424 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.922541 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.927504 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-ldz4j"] Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.927947 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:41 crc kubenswrapper[5002]: W1203 16:31:41.930591 5002 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 03 16:31:41 crc kubenswrapper[5002]: E1203 16:31:41.930654 5002 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.931011 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.931439 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.938351 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.952353 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.963295 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.990100 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.999819 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.999863 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:41 crc kubenswrapper[5002]: I1203 16:31:41.999872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:41.999891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:41.999904 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:41Z","lastTransitionTime":"2025-12-03T16:31:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.035243 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c9009aa9-e6e5-41d5-800d-c3680572f71d-serviceca\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.035305 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfmnm\" (UniqueName: \"kubernetes.io/projected/c9009aa9-e6e5-41d5-800d-c3680572f71d-kube-api-access-jfmnm\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.035332 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/21cef49c-bd93-40d7-a2f7-0c8338455ee6-hosts-file\") pod \"node-resolver-ldz4j\" (UID: \"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.035352 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c9009aa9-e6e5-41d5-800d-c3680572f71d-host\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.035389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48mk6\" (UniqueName: \"kubernetes.io/projected/21cef49c-bd93-40d7-a2f7-0c8338455ee6-kube-api-access-48mk6\") pod \"node-resolver-ldz4j\" (UID: \"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.040662 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.056792 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.073235 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.112683 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.112785 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.112802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.112826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.112872 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.141205 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.141526 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c9009aa9-e6e5-41d5-800d-c3680572f71d-serviceca\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.141574 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfmnm\" (UniqueName: \"kubernetes.io/projected/c9009aa9-e6e5-41d5-800d-c3680572f71d-kube-api-access-jfmnm\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.141597 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c9009aa9-e6e5-41d5-800d-c3680572f71d-host\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.141619 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/21cef49c-bd93-40d7-a2f7-0c8338455ee6-hosts-file\") pod \"node-resolver-ldz4j\" (UID: \"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.141656 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48mk6\" (UniqueName: \"kubernetes.io/projected/21cef49c-bd93-40d7-a2f7-0c8338455ee6-kube-api-access-48mk6\") pod \"node-resolver-ldz4j\" (UID: \"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.142140 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c9009aa9-e6e5-41d5-800d-c3680572f71d-host\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.142206 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/21cef49c-bd93-40d7-a2f7-0c8338455ee6-hosts-file\") pod \"node-resolver-ldz4j\" (UID: 
\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.143773 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c9009aa9-e6e5-41d5-800d-c3680572f71d-serviceca\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.194625 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfmnm\" (UniqueName: \"kubernetes.io/projected/c9009aa9-e6e5-41d5-800d-c3680572f71d-kube-api-access-jfmnm\") pod \"node-ca-6kd7v\" (UID: \"c9009aa9-e6e5-41d5-800d-c3680572f71d\") " pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.208764 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc 
kubenswrapper[5002]: I1203 16:31:42.217883 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.217942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.217955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.218171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.218189 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.235718 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6kd7v" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.284018 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.323986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.324017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.324026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.324041 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.324052 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.341108 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.356574 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.378122 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.392354 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.405907 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.423869 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.426846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.426917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.426934 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.426959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.426975 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.461678 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.487087 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.500073 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.529559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.529596 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.529605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.529621 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.529630 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.631944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.632040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.632054 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.632079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.632092 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.734540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.734594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.734606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.734626 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.734640 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.839954 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:42 crc kubenswrapper[5002]: E1203 16:31:42.842783 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.845844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.845912 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.845928 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.845958 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.845977 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.914221 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gjxps"] Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.914851 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.924512 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-03 16:26:41 +0000 UTC, rotation deadline is 2026-08-29 02:49:02.813293735 +0000 UTC Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.924581 5002 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6442h17m19.888715434s for next certificate rotation Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.941311 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-bzb7f"] Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.942052 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.945002 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-8v6vc"] Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.945759 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.948709 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-k8s-cni-cncf-io\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.948818 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-etc-kubernetes\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.948908 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-hostroot\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.948987 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-conf-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949079 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2de485fd-67c0-4be7-abb1-92509ea373da-cni-binary-copy\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949150 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-multus-certs\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949228 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-cnibin\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949304 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-os-release\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949384 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2de485fd-67c0-4be7-abb1-92509ea373da-multus-daemon-config\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 
16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949462 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-system-cni-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949544 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-cni-bin\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949615 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-kubelet\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949685 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-socket-dir-parent\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949770 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-netns\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949858 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-cni-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.949942 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-cni-multus\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.950015 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qshft\" (UniqueName: \"kubernetes.io/projected/2de485fd-67c0-4be7-abb1-92509ea373da-kube-api-access-qshft\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.956136 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.957134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.957268 5002 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.957360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.957446 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.957522 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:42Z","lastTransitionTime":"2025-12-03T16:31:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.961189 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.961614 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.987182 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.987950 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.988369 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.988464 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.988546 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.988670 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.988709 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.989091 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 16:31:42 crc kubenswrapper[5002]: I1203 16:31:42.990563 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.011244 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6kd7v" event={"ID":"c9009aa9-e6e5-41d5-800d-c3680572f71d","Type":"ContainerStarted","Data":"00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.011290 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6kd7v" 
event={"ID":"c9009aa9-e6e5-41d5-800d-c3680572f71d","Type":"ContainerStarted","Data":"9256744d23af1ee448d45552bc1b5166e321e8ffc171b7c5889cc94a246ae7c2"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.042163 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6
355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.050539 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-rootfs\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.050861 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-netns\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051015 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-kubelet\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051342 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-cni-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051448 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qshft\" (UniqueName: \"kubernetes.io/projected/2de485fd-67c0-4be7-abb1-92509ea373da-kube-api-access-qshft\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051549 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k865f\" (UniqueName: \"kubernetes.io/projected/b16d350b-678e-4558-ac4c-634fcdb2d2f7-kube-api-access-k865f\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051661 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-k8s-cni-cncf-io\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051776 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-proxy-tls\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051897 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-conf-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.051991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052099 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2de485fd-67c0-4be7-abb1-92509ea373da-cni-binary-copy\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052197 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cnibin\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052301 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-os-release\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052408 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2de485fd-67c0-4be7-abb1-92509ea373da-multus-daemon-config\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052518 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgvqd\" (UniqueName: \"kubernetes.io/projected/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-kube-api-access-hgvqd\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052613 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cni-binary-copy\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052733 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-system-cni-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052854 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-cni-bin\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.052951 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-socket-dir-parent\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053047 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-system-cni-dir\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 
16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053161 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-cni-multus\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053264 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-etc-kubernetes\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-mcd-auth-proxy-config\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053458 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-hostroot\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053546 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-os-release\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-multus-certs\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053763 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-cnibin\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.053874 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.054047 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-netns\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.054167 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-kubelet\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.054434 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-cni-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055427 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-etc-kubernetes\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055498 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-socket-dir-parent\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055454 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-k8s-cni-cncf-io\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055448 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-os-release\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055454 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-hostroot\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055560 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-multus-conf-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055577 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-system-cni-dir\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055589 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-run-multus-certs\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055585 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-cni-multus\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055600 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-host-var-lib-cni-bin\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.055719 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2de485fd-67c0-4be7-abb1-92509ea373da-cnibin\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.056355 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2de485fd-67c0-4be7-abb1-92509ea373da-multus-daemon-config\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.056361 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2de485fd-67c0-4be7-abb1-92509ea373da-cni-binary-copy\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.060318 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.060362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.060372 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.060392 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.060404 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.096194 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.096757 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qshft\" (UniqueName: \"kubernetes.io/projected/2de485fd-67c0-4be7-abb1-92509ea373da-kube-api-access-qshft\") pod \"multus-gjxps\" (UID: \"2de485fd-67c0-4be7-abb1-92509ea373da\") " pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.115395 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.127431 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.142087 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.154822 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cni-binary-copy\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.154871 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-system-cni-dir\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.154901 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-mcd-auth-proxy-config\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.154922 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.154943 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-rootfs\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.154969 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k865f\" (UniqueName: \"kubernetes.io/projected/b16d350b-678e-4558-ac4c-634fcdb2d2f7-kube-api-access-k865f\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155000 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-proxy-tls\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155015 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cnibin\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155028 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-os-release\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155041 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155056 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgvqd\" (UniqueName: \"kubernetes.io/projected/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-kube-api-access-hgvqd\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cnibin\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155511 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-os-release\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.155540 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-rootfs\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.156099 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-system-cni-dir\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.156584 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cni-binary-copy\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.156641 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-mcd-auth-proxy-config\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.156775 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b16d350b-678e-4558-ac4c-634fcdb2d2f7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.160444 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.160584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-proxy-tls\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.160639 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b16d350b-678e-4558-ac4c-634fcdb2d2f7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.165559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.165614 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.165628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.165648 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.165664 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.175615 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.176938 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k865f\" (UniqueName: \"kubernetes.io/projected/b16d350b-678e-4558-ac4c-634fcdb2d2f7-kube-api-access-k865f\") pod \"multus-additional-cni-plugins-8v6vc\" (UID: \"b16d350b-678e-4558-ac4c-634fcdb2d2f7\") " pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.181054 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgvqd\" (UniqueName: \"kubernetes.io/projected/c1d64ada-fbf9-4b0e-abb6-9b29bfec7309-kube-api-access-hgvqd\") pod \"machine-config-daemon-bzb7f\" (UID: \"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\") " pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: 
I1203 16:31:43.192622 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.199854 5002 projected.go:288] Couldn't get configMap openshift-dns/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.199923 5002 projected.go:194] Error preparing data for projected volume kube-api-access-48mk6 for pod openshift-dns/node-resolver-ldz4j: failed to sync configmap cache: timed out waiting for the condition Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.200020 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/21cef49c-bd93-40d7-a2f7-0c8338455ee6-kube-api-access-48mk6 podName:21cef49c-bd93-40d7-a2f7-0c8338455ee6 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:43.699995259 +0000 UTC m=+27.113817147 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-48mk6" (UniqueName: "kubernetes.io/projected/21cef49c-bd93-40d7-a2f7-0c8338455ee6-kube-api-access-48mk6") pod "node-resolver-ldz4j" (UID: "21cef49c-bd93-40d7-a2f7-0c8338455ee6") : failed to sync configmap cache: timed out waiting for the condition Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.208590 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.222563 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.238299 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.255795 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.264850 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gjxps" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.267709 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.267851 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.267955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.268051 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.268130 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.272465 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: W1203 16:31:43.279288 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2de485fd_67c0_4be7_abb1_92509ea373da.slice/crio-215f6ef1f34ad605c679899a186709ae70468f5982128c0e02e63d0555cfcd0d WatchSource:0}: Error finding container 215f6ef1f34ad605c679899a186709ae70468f5982128c0e02e63d0555cfcd0d: Status 404 returned error can't find the container with id 215f6ef1f34ad605c679899a186709ae70468f5982128c0e02e63d0555cfcd0d Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.285062 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.286922 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.293417 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.300714 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podI
P\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: W1203 16:31:43.313269 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb16d350b_678e_4558_ac4c_634fcdb2d2f7.slice/crio-d8f68f22a35e3f9e1b206b1baa19cb989de5c0d039c471d83a5e6525baec1bb5 WatchSource:0}: Error finding container d8f68f22a35e3f9e1b206b1baa19cb989de5c0d039c471d83a5e6525baec1bb5: Status 404 returned error can't find the container with id d8f68f22a35e3f9e1b206b1baa19cb989de5c0d039c471d83a5e6525baec1bb5 Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.318897 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.338406 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.352362 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.367794 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.379288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.379335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.379350 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.379369 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.379381 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.386737 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.400708 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4szh5"] Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.402654 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.403450 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.406775 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 
16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.407450 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.407618 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.407638 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.407766 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.416427 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.416911 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.417876 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.431967 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.452681 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459628 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-ovn-kubernetes\") pod 
\"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459673 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-config\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459694 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-slash\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459708 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-ovn\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459774 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-bin\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-script-lib\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459814 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-var-lib-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459848 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovn-node-metrics-cert\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459870 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-netd\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459888 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-etc-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459957 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-systemd-units\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.459983 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-systemd\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460011 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-env-overrides\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460026 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pxr9\" (UniqueName: \"kubernetes.io/projected/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-kube-api-access-4pxr9\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460044 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-netns\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-node-log\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460076 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-log-socket\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460093 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-kubelet\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.460117 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.480019 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.483209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.483253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.483266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.483289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.483302 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.501005 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.501130 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.523592 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.543316 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha25
6:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561252 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561360 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-etc-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561385 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-systemd-units\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561410 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-systemd\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561428 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pxr9\" (UniqueName: \"kubernetes.io/projected/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-kube-api-access-4pxr9\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561444 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-env-overrides\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561459 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-netns\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561505 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-netns\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.561526 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:51.561466926 +0000 UTC m=+34.975288924 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561565 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-systemd-units\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561591 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-systemd\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-node-log\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561674 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-log-socket\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561710 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561742 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-kubelet\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561809 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561844 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-ovn-kubernetes\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561902 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-config\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561940 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-ovn\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561968 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562005 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562037 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-slash\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562070 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-bin\") pod \"ovnkube-node-4szh5\" (UID: 
\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562104 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-script-lib\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562143 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-var-lib-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562172 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovn-node-metrics-cert\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562233 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-netd\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562327 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-env-overrides\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562364 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-netd\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562431 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-ovn\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562471 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.562562 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.562623 5002 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:51.562610537 +0000 UTC m=+34.976432605 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562667 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-slash\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562712 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-bin\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562810 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-config\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.561546 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-etc-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562861 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-node-log\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.562887 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-log-socket\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.562954 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.562997 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:51.562984277 +0000 UTC m=+34.976806165 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.563027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-kubelet\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.563051 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.563078 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-ovn-kubernetes\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.563138 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-var-lib-openvswitch\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.563554 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-script-lib\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.566975 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.567715 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovn-node-metrics-cert\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.585934 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.585977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.585989 5002 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.586010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.586024 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.588029 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.591710 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pxr9\" (UniqueName: \"kubernetes.io/projected/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-kube-api-access-4pxr9\") pod \"ovnkube-node-4szh5\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.602173 5002 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.616813 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.629613 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.641881 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.652783 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.662977 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.663041 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663215 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663255 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663272 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663215 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663334 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:31:51.663313675 +0000 UTC m=+35.077135563 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663335 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663355 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.663383 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-03 16:31:51.663376137 +0000 UTC m=+35.077198025 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.664771 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.674573 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.688819 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.688852 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.688861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.688879 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.688890 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.695463 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.731028 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:43 crc kubenswrapper[5002]: W1203 16:31:43.749526 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc13f2ec_2d1e_4432_9f8d_82079a9dfe01.slice/crio-b7de1f69a6d71ed023463e83211b9db79010da03f9e2419ceb36470b10309fe5 WatchSource:0}: Error finding container b7de1f69a6d71ed023463e83211b9db79010da03f9e2419ceb36470b10309fe5: Status 404 returned error can't find the container with id b7de1f69a6d71ed023463e83211b9db79010da03f9e2419ceb36470b10309fe5 Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.764607 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48mk6\" (UniqueName: \"kubernetes.io/projected/21cef49c-bd93-40d7-a2f7-0c8338455ee6-kube-api-access-48mk6\") pod \"node-resolver-ldz4j\" (UID: \"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.771149 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48mk6\" (UniqueName: \"kubernetes.io/projected/21cef49c-bd93-40d7-a2f7-0c8338455ee6-kube-api-access-48mk6\") pod \"node-resolver-ldz4j\" (UID: \"21cef49c-bd93-40d7-a2f7-0c8338455ee6\") " pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.792395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.792465 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.792483 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.792872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.792918 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.839619 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.839691 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.840214 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:43 crc kubenswrapper[5002]: E1203 16:31:43.840359 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.902987 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.903019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.903027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.903041 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:43 crc kubenswrapper[5002]: I1203 16:31:43.903069 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:43Z","lastTransitionTime":"2025-12-03T16:31:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.009002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.009048 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.009059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.009074 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.009084 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.015685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.015780 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.015796 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"9aa8574add34eb3c8a15281e39d95e9db307cb29064922fca202eb93c3dfc390"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.016984 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerStarted","Data":"cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.017007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerStarted","Data":"215f6ef1f34ad605c679899a186709ae70468f5982128c0e02e63d0555cfcd0d"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.018095 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.018161 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"b7de1f69a6d71ed023463e83211b9db79010da03f9e2419ceb36470b10309fe5"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.019053 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerStarted","Data":"488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.019108 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerStarted","Data":"d8f68f22a35e3f9e1b206b1baa19cb989de5c0d039c471d83a5e6525baec1bb5"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.031047 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.041054 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-ldz4j" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.043190 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.062863 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled
\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-ove
rrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p
xr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.076633 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.087934 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.102305 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.112058 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.112110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.112125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.112146 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.112159 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.118833 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.132817 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.148109 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.163664 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: W1203 16:31:44.181631 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21cef49c_bd93_40d7_a2f7_0c8338455ee6.slice/crio-31e2f467790e68616d07f86b27f9578654de1b3ebd49c1233726a20e86adb70c WatchSource:0}: Error finding container 31e2f467790e68616d07f86b27f9578654de1b3ebd49c1233726a20e86adb70c: Status 404 returned error can't find the container with id 31e2f467790e68616d07f86b27f9578654de1b3ebd49c1233726a20e86adb70c Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.181946 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.196842 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.215440 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.215490 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.215503 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.215526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.215537 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.221731 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.233422 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.249882 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.267538 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.278961 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.296218 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.310853 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.329073 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.333266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.333316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.333327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.333347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.333361 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.349949 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"image
ID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.369081 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.382029 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.399269 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.412110 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.427595 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.435578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.435629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.435641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.435664 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.435676 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.448653 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.465043 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.539026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.539093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.539103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.539120 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.539132 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.641951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.641994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.642008 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.642024 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.642035 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.744864 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.744921 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.744933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.744954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.744970 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.842878 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:44 crc kubenswrapper[5002]: E1203 16:31:44.843068 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.847536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.847579 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.847591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.847613 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.847625 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.950625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.950666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.950676 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.950693 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:44 crc kubenswrapper[5002]: I1203 16:31:44.950704 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:44Z","lastTransitionTime":"2025-12-03T16:31:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.033793 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-ldz4j" event={"ID":"21cef49c-bd93-40d7-a2f7-0c8338455ee6","Type":"ContainerStarted","Data":"a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.034233 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-ldz4j" event={"ID":"21cef49c-bd93-40d7-a2f7-0c8338455ee6","Type":"ContainerStarted","Data":"31e2f467790e68616d07f86b27f9578654de1b3ebd49c1233726a20e86adb70c"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.036880 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4" exitCode=0 Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.037058 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.040030 5002 generic.go:334] "Generic (PLEG): container finished" podID="b16d350b-678e-4558-ac4c-634fcdb2d2f7" containerID="488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675" exitCode=0 Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.040075 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerDied","Data":"488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.049766 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.054665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.054708 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.054721 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.054777 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.054792 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.061139 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.085876 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-over
rides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4px
r9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.102061 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.111952 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.125177 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.140729 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.157188 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.159950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.160016 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.160032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.160057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.160074 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.172423 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.184803 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.198363 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.216883 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.231786 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.246316 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.261546 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.263882 5002 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.263935 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.263953 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.263980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.264030 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.282020 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z 
is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.295611 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.311366 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.324740 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.345566 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.362988 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.371089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.371142 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.371157 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.371178 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.371192 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.380684 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.394804 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.409772 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.424002 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.438212 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.458587 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.474014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.474083 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.474094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.474126 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.474144 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.477286 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.578472 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.578890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.578902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.578922 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.578937 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.681907 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.681951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.681964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.681985 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.681997 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.784533 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.784588 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.784602 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.784625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.784640 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.839826 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:45 crc kubenswrapper[5002]: E1203 16:31:45.839972 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.840108 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:45 crc kubenswrapper[5002]: E1203 16:31:45.840333 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.901429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.901459 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.901468 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.901481 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:45 crc kubenswrapper[5002]: I1203 16:31:45.901491 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:45Z","lastTransitionTime":"2025-12-03T16:31:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.003487 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.003833 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.003844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.003861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.003871 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.059005 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.059055 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.059068 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.059079 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.061252 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerStarted","Data":"fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.062394 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.074560 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.094807 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.106281 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.106314 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.106326 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.106342 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.106352 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.107825 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.124257 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.139028 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha2
56:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.159628 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.171669 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.184227 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.206530 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.208841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.208886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.208895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.208915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.208925 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.220381 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.232200 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.243362 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.258797 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.290433 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z 
is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.319820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.319864 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.319890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.319912 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.319927 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.321414 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.341727 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.356328 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.375443 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.423019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.423071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.423083 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.423108 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.423120 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.458822 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.481126 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.500415 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.518713 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.526170 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.526214 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.526229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.526249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.526261 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.534055 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.588440 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.605468 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.618942 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.629355 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.629406 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.629416 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.629436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.629447 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.635547 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.655493 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.705140 5002 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.731967 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.732018 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.732031 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.732051 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.732065 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.835558 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.835606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.835616 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.835633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.835646 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.840066 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:46 crc kubenswrapper[5002]: E1203 16:31:46.840187 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.858919 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.873623 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.888044 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.903571 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.922469 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.935326 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.938158 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.938210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.938219 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.938235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.938244 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:46Z","lastTransitionTime":"2025-12-03T16:31:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.946582 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.960479 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.974030 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:46 crc kubenswrapper[5002]: I1203 16:31:46.995447 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:46Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.014359 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.028308 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.041059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.041136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.041150 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.041180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.041195 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.043957 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.060048 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.069423 5002 generic.go:334] "Generic (PLEG): container finished" podID="b16d350b-678e-4558-ac4c-634fcdb2d2f7" containerID="fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151" exitCode=0
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.069873 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerDied","Data":"fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151"}
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.074828 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"}
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.074938 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"}
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.090244 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.108443 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.123968 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.141534 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.159365 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.159744 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.159800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.159811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.159829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.159844 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.175340 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.190897 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.205301 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.223464 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.248501 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.262147 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.262205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.262218 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.262237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.262255 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.284384 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.327589 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.364547 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.364603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.364635 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.364713 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.364729 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.369276 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.409407 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.467323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.467367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.467379 5002 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.467395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.467407 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.570352 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.570410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.570420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.570434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.570444 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.673425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.673469 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.673479 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.673495 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.673505 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.776520 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.776567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.776582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.776601 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.776639 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.839631 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.839631 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:47 crc kubenswrapper[5002]: E1203 16:31:47.839958 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:47 crc kubenswrapper[5002]: E1203 16:31:47.840058 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.879508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.879604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.879623 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.879652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.879672 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.983589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.983683 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.983707 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.983794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:47 crc kubenswrapper[5002]: I1203 16:31:47.983834 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:47Z","lastTransitionTime":"2025-12-03T16:31:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.081134 5002 generic.go:334] "Generic (PLEG): container finished" podID="b16d350b-678e-4558-ac4c-634fcdb2d2f7" containerID="62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394" exitCode=0 Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.081199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerDied","Data":"62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.085988 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.086029 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.086042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.086062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.086077 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.103209 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.129193 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.151557 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.170507 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.190155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.190257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.190283 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.190320 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.190348 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.194450 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.217887 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.252293 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.270547 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.290467 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.294250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.294299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.294314 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.294334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.294347 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.323445 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e7343
57625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.345102 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.356104 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.372652 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.393771 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.397201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.397237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.397250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.397274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.397290 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
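
[annotation] The NotReady flip recorded in the condition just above comes from the kubelet's network-readiness probe: the container runtime keeps reporting NetworkReady=false until a CNI configuration file appears in the directory named in the message. A minimal illustrative check in Go, assuming only the path taken from the log; the helper cniConfReady is hypothetical, a sketch of the condition being waited on, not kubelet or CRI-O source:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cniConfReady reports whether dir contains at least one CNI network
    // configuration file -- the condition the runtime waits on before it
    // declares the node's network ready.
    func cniConfReady(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        // Path taken from the log message above.
        ready, err := cniConfReady("/etc/kubernetes/cni/net.d")
        fmt.Println(ready, err)
    }

On this node the directory presumably stays empty because the ovnkube-node-4szh5 pod that would write the OVN-Kubernetes config (its ovnkube-controller container mounts /etc/cni/net.d) is itself still stuck in PodInitializing, as its container statuses further down show.
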
Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.406931 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.425406 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.443613 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.459270 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.472270 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.485266 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.494892 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.499907 5002 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.499944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.499956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.499977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.499990 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.516068 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z 
is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.539597 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.552556 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
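
[annotation] Every failed patch in this stretch dies on the same POST to https://127.0.0.1:9743/pod with the same expired-certificate error; the "Internal error occurred: failed calling webhook" wrapper is the API server's admission-webhook error, relayed back to the kubelet whose status patch triggered it. The wording "x509: certificate has expired or is not yet valid: current time ... is after ..." is the stock message of Go's crypto/x509 CertificateInvalidError, so the failing check is the standard library's validity-window test against the webhook's serving certificate, which lapsed on 2025-08-24T17:21:41Z. A self-contained reproduction with a throwaway certificate (nothing below comes from the cluster except the shape of the error):

    package main

    import (
        "crypto/ecdsa"
        "crypto/elliptic"
        "crypto/rand"
        "crypto/x509"
        "crypto/x509/pkix"
        "fmt"
        "math/big"
        "time"
    )

    func main() {
        // Self-signed throwaway certificate whose validity window already closed.
        key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
        tmpl := &x509.Certificate{
            SerialNumber:          big.NewInt(1),
            Subject:               pkix.Name{CommonName: "expired-example"},
            NotBefore:             time.Now().Add(-2 * time.Hour),
            NotAfter:              time.Now().Add(-1 * time.Hour), // already lapsed
            IsCA:                  true,
            BasicConstraintsValid: true,
        }
        der, _ := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
        cert, _ := x509.ParseCertificate(der)

        roots := x509.NewCertPool()
        roots.AddCert(cert)
        if _, err := cert.Verify(x509.VerifyOptions{Roots: roots}); err != nil {
            // Prints: x509: certificate has expired or is not yet valid:
            // current time <now> is after <NotAfter>
            fmt.Println(err)
        }
    }

Whether the lapsed certificate is really the one served on 127.0.0.1:9743 can be confirmed from the node by dialing the endpoint named in the log and reading the validity window off the presented leaf. A sketch, with verification disabled only so the handshake completes for inspection (the network-node-identity-vrzqb webhook container mounts its serving cert at /etc/webhook-cert/, per its status above):

    package main

    import (
        "crypto/tls"
        "fmt"
    )

    func main() {
        // Endpoint taken from the failing POST in the log.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()
        leaf := conn.ConnectionState().PeerCertificates[0]
        fmt.Println("NotBefore:", leaf.NotBefore, "NotAfter:", leaf.NotAfter)
    }

Given the timestamps in these records, NotAfter should print as 2025-08-24 17:21:41 +0000 UTC, roughly three months before the node's current time.
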
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.570408 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.585352 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.603168 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.603223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.603237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.603261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.603276 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.608085 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.628706 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.642864 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.706824 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.706890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.706903 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.706929 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.706943 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.809921 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.809977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.809987 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.810009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.810020 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.839812 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.840067 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.860379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.860453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.860471 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.860497 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.860516 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.878617 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.884882 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.884949 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.884961 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.884981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.884993 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.906395 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.911890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.911944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.911955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.911975 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.911987 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.932358 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.937983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.938063 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.938084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.938113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.938136 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.963220 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.969981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.970063 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.970089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.970133 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.970158 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.990674 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:48 crc kubenswrapper[5002]: E1203 16:31:48.990889 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.993066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.993114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.993125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.993142 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:48 crc kubenswrapper[5002]: I1203 16:31:48.993154 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:48Z","lastTransitionTime":"2025-12-03T16:31:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.090167 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerStarted","Data":"e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.096045 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.096296 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.096415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.096524 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.096622 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.200586 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.200638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.200651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.200671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.200685 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.303930 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.304006 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.304025 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.304056 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.304080 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.407599 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.407653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.407665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.407691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.407705 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.511017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.511092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.511106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.511132 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.511146 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.614478 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.614546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.614566 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.614591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.614608 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.717778 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.717819 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.717828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.717843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.717853 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.820657 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.820712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.820721 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.820740 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.820788 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.840560 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.840634 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:49 crc kubenswrapper[5002]: E1203 16:31:49.840818 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:49 crc kubenswrapper[5002]: E1203 16:31:49.840954 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.923629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.923681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.923700 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.923719 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:49 crc kubenswrapper[5002]: I1203 16:31:49.923732 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:49Z","lastTransitionTime":"2025-12-03T16:31:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.026831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.026871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.026880 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.026894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.026903 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.097943 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"} Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.100708 5002 generic.go:334] "Generic (PLEG): container finished" podID="b16d350b-678e-4558-ac4c-634fcdb2d2f7" containerID="e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0" exitCode=0 Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.100812 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerDied","Data":"e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0"} Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.121107 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.129745 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.129810 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.129826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.129848 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.129865 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.138833 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.154901 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.170985 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.188949 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.206211 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.222015 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.232268 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.232321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.232333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.232350 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.232362 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.240560 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.254137 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.268618 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.280146 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.292589 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.304179 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.323291 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:50Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.334637 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.334672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.334683 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.334716 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.334730 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.438230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.438276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.438286 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.438301 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.438314 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.541071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.541120 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.541130 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.541148 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.541159 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.644695 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.644743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.644766 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.644786 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.644797 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.747887 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.747946 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.747959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.747979 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.747991 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.839567 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:31:50 crc kubenswrapper[5002]: E1203 16:31:50.839763 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.850252 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.850325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.850347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.850374 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.850393 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.952716 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.952803 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.952814 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.952827 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:50 crc kubenswrapper[5002]: I1203 16:31:50.952838 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:50Z","lastTransitionTime":"2025-12-03T16:31:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.056282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.056366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.056379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.056404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.056416 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.106163 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerStarted","Data":"0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10"}
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.125783 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.141680 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.157070 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.158994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.159085 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.159115 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.159153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.159181 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.174818 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.190450 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.207387 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.223485 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.238702 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.261466 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z 
is after 2025-08-24T17:21:41Z"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.262257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.262314 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.262334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.262364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.262383 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.282462 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.297518 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.314420 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.338189 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.355490 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.370597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.370676 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.370692 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.370713 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.370770 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.473776 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.473816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.473825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.473844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.473854 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.564311 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.564469 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:32:07.564439993 +0000 UTC m=+50.978261931 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.565154 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.565226 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.565419 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.565493 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:07.565476511 +0000 UTC m=+50.979298439 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.565502 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.565651 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:07.565606514 +0000 UTC m=+50.979428542 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.577647 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.577723 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.577782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.577809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.577824 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.666618 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.666690 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.666998 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667026 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667025 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667092 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667114 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667210 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:07.667173476 +0000 UTC m=+51.080995534 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667042 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.667403 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:07.667377861 +0000 UTC m=+51.081199949 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.682080 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.682141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.682158 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.682178 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.682193 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.784360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.784420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.784434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.784458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.784471 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.840334 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.840444 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.840511 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:51 crc kubenswrapper[5002]: E1203 16:31:51.840621 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.888317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.888394 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.888407 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.888443 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:51 crc kubenswrapper[5002]: I1203 16:31:51.888458 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:51Z","lastTransitionTime":"2025-12-03T16:31:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.014050 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.014109 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.014169 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.014188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.014201 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.115024 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.115736 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.115852 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.117665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.117698 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.117711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.117725 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.117772 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.120082 5002 generic.go:334] "Generic (PLEG): container finished" podID="b16d350b-678e-4558-ac4c-634fcdb2d2f7" containerID="0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10" exitCode=0 Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.120135 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerDied","Data":"0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.140332 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.145223 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.150246 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.157107 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.174235 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.187011 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.204715 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.217854 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.220476 5002 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.220512 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.220520 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.220537 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.220547 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.238703 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945
b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.252663 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.279067 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.298354 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.320452 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.322791 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.322839 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.322851 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.322867 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.322882 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.334062 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.348999 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.361247 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.378108 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.393785 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.408863 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.424039 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.426037 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.426076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.426086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.426103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.426117 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.437507 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.467824 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.489599 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.511733 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.529853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.529888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.529899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.529916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.529928 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.533101 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.552384 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.568917 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.586371 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.599110 5002 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.614692 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.632734 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.632800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.632809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.632826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.632836 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.735639 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.735684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.735696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.735717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.735731 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.838501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.838546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.838557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.838574 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.838585 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.840103 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:52 crc kubenswrapper[5002]: E1203 16:31:52.840382 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.942743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.942831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.942850 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.942875 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:52 crc kubenswrapper[5002]: I1203 16:31:52.942893 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:52Z","lastTransitionTime":"2025-12-03T16:31:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.046671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.047039 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.047109 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.047184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.047260 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.128086 5002 generic.go:334] "Generic (PLEG): container finished" podID="b16d350b-678e-4558-ac4c-634fcdb2d2f7" containerID="c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72" exitCode=0 Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.128214 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerDied","Data":"c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.128521 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.153147 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8
acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.162419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.162602 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.162677 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.162772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.162866 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.228060 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.264654 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.278945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.278971 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.278978 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.278991 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.278999 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.306581 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.347736 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.381589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.381859 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.381948 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.382064 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.382152 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.398047 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.406420 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.417951 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.428007 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.445618 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945
b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.457967 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.469532 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.482799 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.484443 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.484482 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.484524 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.484547 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.484559 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.498231 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a012
80e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.587235 5002 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.587287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.587302 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.587321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.587333 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.694015 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.694072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.694086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.694106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.694120 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.796510 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.796547 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.796557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.796572 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.796582 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.840102 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.840159 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:53 crc kubenswrapper[5002]: E1203 16:31:53.840253 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:53 crc kubenswrapper[5002]: E1203 16:31:53.840442 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.898452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.898511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.898522 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.898539 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:53 crc kubenswrapper[5002]: I1203 16:31:53.898552 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:53Z","lastTransitionTime":"2025-12-03T16:31:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.001507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.001551 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.001564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.001584 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.001598 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.104495 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.104545 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.104559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.104579 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.104593 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.144292 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.144372 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" event={"ID":"b16d350b-678e-4558-ac4c-634fcdb2d2f7","Type":"ContainerStarted","Data":"c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.161352 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.174149 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.188433 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.201153 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.208088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.208118 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.208131 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.208147 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.208185 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.214826 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.228166 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.242799 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.255718 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.266059 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.277440 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.289626 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.303491 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.310903 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.310954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.310966 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.310985 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.310999 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.329856 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.342297 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.413023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.413055 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.413066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.413082 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.413094 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.515382 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.515418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.515429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.515445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.515458 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.618546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.618583 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.618598 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.618615 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.618628 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.720929 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.720959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.720968 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.720981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.720991 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.823417 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.823464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.823478 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.823496 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.823511 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.841728 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:54 crc kubenswrapper[5002]: E1203 16:31:54.841876 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.925732 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.925821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.925834 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.925853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:54 crc kubenswrapper[5002]: I1203 16:31:54.925863 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:54Z","lastTransitionTime":"2025-12-03T16:31:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.028335 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.028386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.028399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.028418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.028431 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.132070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.132120 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.132129 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.132146 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.132156 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.236177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.236258 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.236272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.236294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.236311 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.339129 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.339171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.339183 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.339202 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.339214 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.441313 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.441356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.441367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.441380 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.441389 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.544223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.544445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.544454 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.544469 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.544481 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.646873 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.646916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.646926 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.646944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.646954 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.722053 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r"] Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.722584 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.726243 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.726315 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.740386 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.750526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.750587 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.750605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.750636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.750654 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.753459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.777955 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.796456 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.808700 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.812147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f47d5289-b50f-4012-bc4d-2aa9df7864c8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.812219 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f47d5289-b50f-4012-bc4d-2aa9df7864c8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.812285 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f47d5289-b50f-4012-bc4d-2aa9df7864c8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.812309 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88zbl\" (UniqueName: \"kubernetes.io/projected/f47d5289-b50f-4012-bc4d-2aa9df7864c8-kube-api-access-88zbl\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.824464 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"ho
stroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.839542 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.839598 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:55 crc kubenswrapper[5002]: E1203 16:31:55.839682 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.839575 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: E1203 16:31:55.839852 
5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.852471 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b
8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.853809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.853847 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.853859 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.853877 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.853889 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.868293 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.881088 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.896674 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.912807 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f47d5289-b50f-4012-bc4d-2aa9df7864c8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.912867 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f47d5289-b50f-4012-bc4d-2aa9df7864c8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.912944 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f47d5289-b50f-4012-bc4d-2aa9df7864c8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.912974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88zbl\" (UniqueName: \"kubernetes.io/projected/f47d5289-b50f-4012-bc4d-2aa9df7864c8-kube-api-access-88zbl\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.913648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f47d5289-b50f-4012-bc4d-2aa9df7864c8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.913811 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f47d5289-b50f-4012-bc4d-2aa9df7864c8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 
16:31:55.918823 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f47d5289-b50f-4012-bc4d-2aa9df7864c8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.918911 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.931214 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.935010 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88zbl\" (UniqueName: \"kubernetes.io/projected/f47d5289-b50f-4012-bc4d-2aa9df7864c8-kube-api-access-88zbl\") pod \"ovnkube-control-plane-749d76644c-dvd6r\" (UID: \"f47d5289-b50f-4012-bc4d-2aa9df7864c8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.943693 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.957010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:55 crc 
kubenswrapper[5002]: I1203 16:31:55.957245 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.957357 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.957466 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.957572 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:55Z","lastTransitionTime":"2025-12-03T16:31:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:55 crc kubenswrapper[5002]: I1203 16:31:55.957523 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.044555 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.062336 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.062385 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.062399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.062418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.062432 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.152676 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" event={"ID":"f47d5289-b50f-4012-bc4d-2aa9df7864c8","Type":"ContainerStarted","Data":"a029225836f667b7bb40d9bc2cc3b571e65e64deefaf92ba67ef181c21ae2176"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.165179 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.165229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.165250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.165271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.165298 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.273369 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.273432 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.273447 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.273469 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.273483 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.376623 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.376664 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.376674 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.376692 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.376703 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.479832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.479890 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.479902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.479921 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.479932 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.583264 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.583298 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.583310 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.583328 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.583343 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.693500 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.693554 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.693565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.693589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.693600 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.796483 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.797034 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.797047 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.797070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.797082 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.840409 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:31:56 crc kubenswrapper[5002]: E1203 16:31:56.840592 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.895477 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.899150 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.899184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.899195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.899213 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.899226 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:56Z","lastTransitionTime":"2025-12-03T16:31:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.909773 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.934696 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.958911 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.972662 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:56 crc kubenswrapper[5002]: I1203 16:31:56.997103 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.001573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.001622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.001638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.001656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.001671 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.020005 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:
31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.037970 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.052905 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.064897 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.081477 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.101281 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.104933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.105006 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.105027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.105057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.105075 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.116775 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.130641 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.146515 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.159561 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/0.log" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.162808 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518" exitCode=1 Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.162869 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.164082 5002 scope.go:117] "RemoveContainer" containerID="28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518" Dec 03 16:31:57 
crc kubenswrapper[5002]: I1203 16:31:57.166024 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" event={"ID":"f47d5289-b50f-4012-bc4d-2aa9df7864c8","Type":"ContainerStarted","Data":"e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.166080 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" event={"ID":"f47d5289-b50f-4012-bc4d-2aa9df7864c8","Type":"ContainerStarted","Data":"f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.180730 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-
controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.201889 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.208980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.209043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.209061 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.209087 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.209256 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.218722 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.233283 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.249857 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.261518 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-c7qvw"] Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.262074 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.262142 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.266989 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.280597 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.308295 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:56Z\\\",\\\"message\\\":\\\"4403 6270 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:56.684687 6270 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:56.684789 6270 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:31:56.684873 6270 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:31:56.684910 6270 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:31:56.684911 6270 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 16:31:56.684923 6270 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:31:56.684943 6270 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:31:56.684953 6270 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:56.684965 6270 factory.go:656] Stopping watch factory\\\\nI1203 16:31:56.684981 6270 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:31:56.685008 6270 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:31:56.685017 6270 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.312328 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.312386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.312400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.312422 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.312436 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.327668 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.328467 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96xrg\" (UniqueName: \"kubernetes.io/projected/24141739-e7a8-40cf-ab9e-267ee876230b-kube-api-access-96xrg\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.328539 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.340550 5002 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.356178 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.370980 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.385822 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.399833 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.415509 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.415574 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.415586 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.415610 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.415623 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.419259 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.429182 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.429346 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96xrg\" (UniqueName: \"kubernetes.io/projected/24141739-e7a8-40cf-ab9e-267ee876230b-kube-api-access-96xrg\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.429381 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.429450 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs 
podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:57.929430523 +0000 UTC m=+41.343252411 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.434598 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.447903 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.459564 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96xrg\" (UniqueName: \"kubernetes.io/projected/24141739-e7a8-40cf-ab9e-267ee876230b-kube-api-access-96xrg\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.465321 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\"
:\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"qua
y.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.1
1\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.479354 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.493435 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.511141 5002 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.518651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.518707 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.518717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.518737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.518779 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.532624 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.548367 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.565047 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.579119 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.590923 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 
16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.604554 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.627881 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.627933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.627948 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.627964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.627988 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.634413 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.649271 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.698952 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:56Z\\\",\\\"message\\\":\\\"4403 6270 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:56.684687 6270 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:56.684789 6270 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:31:56.684873 6270 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:31:56.684910 6270 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:31:56.684911 6270 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 16:31:56.684923 6270 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:31:56.684943 6270 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:31:56.684953 6270 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:56.684965 6270 factory.go:656] Stopping watch factory\\\\nI1203 16:31:56.684981 6270 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:31:56.685008 6270 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:31:56.685017 6270 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.720635 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.730880 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.730927 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.730937 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.730956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.730967 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.836500 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.836887 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.837031 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.837139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.837233 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.839737 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.839878 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.839745 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.840136 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.935241 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.935420 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:31:57 crc kubenswrapper[5002]: E1203 16:31:57.935887 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:31:58.935864322 +0000 UTC m=+42.349686210 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.940368 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.940400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.940413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.940433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:57 crc kubenswrapper[5002]: I1203 16:31:57.940449 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:57Z","lastTransitionTime":"2025-12-03T16:31:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.043066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.043111 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.043121 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.043136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.043147 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.146426 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.146480 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.146490 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.146514 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.146524 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.173160 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/0.log" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.177859 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.178078 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.197822 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.216105 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.249590 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.250096 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.250108 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.250126 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.250135 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.283001 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:56Z\\\",\\\"message\\\":\\\"4403 6270 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:56.684687 6270 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:56.684789 6270 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:31:56.684873 6270 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:31:56.684910 6270 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:31:56.684911 6270 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 16:31:56.684923 6270 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:31:56.684943 6270 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:31:56.684953 6270 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:56.684965 6270 factory.go:656] Stopping watch factory\\\\nI1203 16:31:56.684981 6270 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:31:56.685008 6270 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:31:56.685017 6270 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.300584 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.316995 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.398247 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.398292 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.398304 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.398328 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.398339 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.412770 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.427738 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.442786 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.458344 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.473931 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.487514 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.501906 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.501958 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.501969 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.501990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.502004 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.505097 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.518970 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.533178 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.546510 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.559190 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:58Z is after 2025-08-24T17:21:41Z" 
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.604441 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.604502 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.604523 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.604549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.604563 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.707579 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.707627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.707642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.707663 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.707675 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.810399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.810456 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.810470 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.810495 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.810514 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.840156 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.840156 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:31:58 crc kubenswrapper[5002]: E1203 16:31:58.840357 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:31:58 crc kubenswrapper[5002]: E1203 16:31:58.840450 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.913589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.913652 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.913666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.913685 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.913697 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:58Z","lastTransitionTime":"2025-12-03T16:31:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:31:58 crc kubenswrapper[5002]: I1203 16:31:58.947847 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:31:58 crc kubenswrapper[5002]: E1203 16:31:58.948078 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 16:31:58 crc kubenswrapper[5002]: E1203 16:31:58.948171 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:32:00.948146754 +0000 UTC m=+44.361968832 (durationBeforeRetry 2s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.017225 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.017276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.017288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.017308 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.017320 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.083510 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.083589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.083606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.083631 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.083647 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.100589 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.106389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.106462 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.106476 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.106498 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.106513 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.121836 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.127806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.127867 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.127886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.127914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.127933 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.147251 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.152459 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.152523 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.152551 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.152586 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.152611 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.167740 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.174167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.174237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.174253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.174276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.174293 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.183619 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/1.log" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.184855 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/0.log" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.188957 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd" exitCode=1 Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.189019 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.189098 5002 scope.go:117] "RemoveContainer" containerID="28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.190552 5002 scope.go:117] "RemoveContainer" containerID="9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd" Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.190979 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.201910 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.202159 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.204741 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.204808 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.204827 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.204853 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.204871 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.221609 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:56Z\\\",\\\"message\\\":\\\"4403 6270 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:56.684687 6270 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:56.684789 6270 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:31:56.684873 6270 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:31:56.684910 6270 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:31:56.684911 6270 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 16:31:56.684923 6270 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:31:56.684943 6270 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:31:56.684953 6270 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:56.684965 6270 factory.go:656] Stopping watch factory\\\\nI1203 16:31:56.684981 6270 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:31:56.685008 6270 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:31:56.685017 6270 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:58Z\\\",\\\"message\\\":\\\"\\\\nI1203 16:31:58.465514 6484 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:58.465523 6484 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:31:58.465531 6484 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:31:58.465540 6484 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:31:58.465679 6484 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.465824 6484 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.465885 6484 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:58.466116 6484 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.467512 6484 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.467811 6484 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.240038 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.255651 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.272574 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.295093 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b
5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.308029 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.308079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.308090 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.308107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.308119 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.311847 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.323331 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.334518 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.345195 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.359654 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.374368 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.385434 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.398562 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 
16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.411356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.411424 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.411435 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.411452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.411466 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.414811 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.429328 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.445476 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:31:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.514107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.514194 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.514229 5002 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.514270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.514297 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.617530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.617581 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.617593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.617613 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.617626 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.720910 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.721007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.721043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.721079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.721101 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.824253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.824317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.824382 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.824408 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.824434 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.839579 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.839641 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.839733 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:31:59 crc kubenswrapper[5002]: E1203 16:31:59.839886 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.928691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.928800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.928824 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.928851 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:31:59 crc kubenswrapper[5002]: I1203 16:31:59.928869 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:31:59Z","lastTransitionTime":"2025-12-03T16:31:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.032066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.032134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.032153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.032183 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.032202 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.136013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.136081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.136094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.136114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.136130 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.197410 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/1.log"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.239324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.239371 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.239387 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.239408 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.239424 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.342138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.342195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.342223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.342248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.342262 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.446405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.446470 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.446481 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.446502 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.446517 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.549460 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.549518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.549530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.549554 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.549569 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.653143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.653189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.653200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.653216 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.653227 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.756149 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.756216 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.756239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.756269 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.756289 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.840349 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.840504 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:00 crc kubenswrapper[5002]: E1203 16:32:00.840540 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:00 crc kubenswrapper[5002]: E1203 16:32:00.840808 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.858933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.858977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.858989 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.859009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:00 crc kubenswrapper[5002]: I1203 16:32:00.859022 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:00Z","lastTransitionTime":"2025-12-03T16:32:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.008315 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:01 crc kubenswrapper[5002]: E1203 16:32:01.008555 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:32:01 crc kubenswrapper[5002]: E1203 16:32:01.008625 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:32:05.008604567 +0000 UTC m=+48.422426465 (durationBeforeRetry 4s). 
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.031000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.031046 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.031059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.031080 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.031093 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.134285 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.134334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.134346 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.134366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.134378 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.237832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.237901 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.237914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.237936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.237954 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.341571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.341651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.341667 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.341691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.341706 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.445471 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.445535 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.445550 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.445572 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.445585 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.548694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.548790 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.548806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.548833 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.548848 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.652565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.652626 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.652657 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.652684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.652699 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.755727 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.755809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.755820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.755843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.755857 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.839588 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.839645 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:01 crc kubenswrapper[5002]: E1203 16:32:01.839965 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:32:01 crc kubenswrapper[5002]: E1203 16:32:01.840097 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.858605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.858650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.858661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.858677 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.858694 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.960919 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.960956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.960965 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.960980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:01 crc kubenswrapper[5002]: I1203 16:32:01.960991 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:01Z","lastTransitionTime":"2025-12-03T16:32:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.064826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.064906 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.064919 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.064948 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.064963 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.167980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.168071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.168091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.168117 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.168136 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.272099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.272183 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.272203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.272232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.272256 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.374503 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.374548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.374567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.374585 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.374600 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.477815 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.477884 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.477894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.477912 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.477922 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.580546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.580607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.580617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.580638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.580662 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.683458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.683511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.683521 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.683541 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.683556 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.785977 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.786023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.786032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.786067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.786079 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.840471 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.840478 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:02 crc kubenswrapper[5002]: E1203 16:32:02.840652 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:32:02 crc kubenswrapper[5002]: E1203 16:32:02.840832 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.889376 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.889438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.889457 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.889487 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.889507 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.993033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.993125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.993142 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.993160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:02 crc kubenswrapper[5002]: I1203 16:32:02.993170 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:02Z","lastTransitionTime":"2025-12-03T16:32:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.095974 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.096015 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.096026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.096043 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.096055 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.199678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.199730 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.199818 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.199841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.199855 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.302988 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.303054 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.303065 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.303081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.303091 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.406861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.406907 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.406917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.406949 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.406959 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.509722 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.509788 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.509801 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.509817 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.509828 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.612366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.612402 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.612410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.612423 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.612432 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.714858 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.714926 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.714935 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.714950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.714959 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.818655 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.818696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.818705 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.818723 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.818736 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.840223 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.840334 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:03 crc kubenswrapper[5002]: E1203 16:32:03.840384 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:32:03 crc kubenswrapper[5002]: E1203 16:32:03.840531 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.922143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.922231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.922251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.922285 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:03 crc kubenswrapper[5002]: I1203 16:32:03.922309 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:03Z","lastTransitionTime":"2025-12-03T16:32:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.025488 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.025540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.025550 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.025567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.025580 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.129114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.129177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.129200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.129224 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.129242 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.231982 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.232064 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.232075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.232100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.232113 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.335499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.335541 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.335550 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.335564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.335573 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.438374 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.438425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.438438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.438456 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.438471 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.541295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.541354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.541365 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.541386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.541399 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.645201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.645270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.645278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.645292 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.645303 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.749434 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.749508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.749526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.749554 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.749572 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:04Z","lastTransitionTime":"2025-12-03T16:32:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.840166 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:04 crc kubenswrapper[5002]: E1203 16:32:04.840372 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:04 crc kubenswrapper[5002]: I1203 16:32:04.840589 5002 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:32:04 crc kubenswrapper[5002]: E1203 16:32:04.841006 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.063243 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:05 crc kubenswrapper[5002]: E1203 16:32:05.063451 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 16:32:05 crc kubenswrapper[5002]: E1203 16:32:05.063541 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:32:13.063517277 +0000 UTC m=+56.477339165 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered
Has your network provider started?"} Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.679104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.679171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.679201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.679229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.679247 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:05Z","lastTransitionTime":"2025-12-03T16:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.782644 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.782722 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.782772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.782800 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.782817 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:05Z","lastTransitionTime":"2025-12-03T16:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.840284 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:05 crc kubenswrapper[5002]: E1203 16:32:05.840467 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.840907 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:05 crc kubenswrapper[5002]: E1203 16:32:05.841043 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.885317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.885366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.885382 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.885400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.885413 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:05Z","lastTransitionTime":"2025-12-03T16:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.987832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.987896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.987918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.987941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:05 crc kubenswrapper[5002]: I1203 16:32:05.987959 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:05Z","lastTransitionTime":"2025-12-03T16:32:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.091638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.091703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.091870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.091905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.091930 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.195204 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.195256 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.195266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.195287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.195301 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.298247 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.298301 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.298313 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.298332 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.298346 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.401926 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.401988 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.401998 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.402017 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.402027 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.505333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.505401 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.505433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.505464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.505487 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.608532 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.608582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.608592 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.608606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.608616 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.710883 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.710946 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.710964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.710989 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.711025 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.814377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.814448 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.814459 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.814480 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.814492 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.839884 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.839899 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:06 crc kubenswrapper[5002]: E1203 16:32:06.840081 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:06 crc kubenswrapper[5002]: E1203 16:32:06.840176 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.853378 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.870559 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.892311 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa
45f0b97b2559e7f82205e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:56Z\\\",\\\"message\\\":\\\"4403 6270 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:56.684687 6270 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:56.684789 6270 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:31:56.684873 6270 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:31:56.684910 6270 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:31:56.684911 6270 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 16:31:56.684923 6270 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:31:56.684943 6270 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:31:56.684953 6270 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:56.684965 6270 factory.go:656] Stopping watch factory\\\\nI1203 16:31:56.684981 6270 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:31:56.685008 6270 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:31:56.685017 6270 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:58Z\\\",\\\"message\\\":\\\"\\\\nI1203 16:31:58.465514 6484 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:58.465523 6484 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:31:58.465531 6484 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:31:58.465540 6484 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:31:58.465679 6484 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.465824 6484 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.465885 6484 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:58.466116 6484 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.467512 6484 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.467811 6484 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.909340 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.916514 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.916557 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:06 crc 
kubenswrapper[5002]: I1203 16:32:06.916571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.916590 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.916603 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:06Z","lastTransitionTime":"2025-12-03T16:32:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.924858 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.935490 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.949377 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.962719 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.974449 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.986492 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:06 crc kubenswrapper[5002]: I1203 16:32:06.998268 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:06Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.008203 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 
16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.018533 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.019207 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.019244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.019257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.019277 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.019291 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.030184 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.042832 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.052879 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.121562 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.121618 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.121630 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.121651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.121664 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.224086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.224127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.224136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.224151 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.224163 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.327107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.327166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.327176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.327190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.327201 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.430189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.430301 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.430324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.430360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.430385 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.533873 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.533948 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.533959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.533983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.533996 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.595469 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.595645 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.595679 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 16:32:39.595639799 +0000 UTC m=+83.009461687 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.595835 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.595839 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.595999 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:39.595988698 +0000 UTC m=+83.009810586 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.595876 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.596078 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:39.596061459 +0000 UTC m=+83.009883417 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.638258 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.638317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.638336 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.638361 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.638379 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.697065 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.697150 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697254 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697287 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697301 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697313 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697344 5002 projected.go:288] Couldn't get 
configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697360 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697377 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:39.697356703 +0000 UTC m=+83.111178591 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.697428 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:32:39.697405674 +0000 UTC m=+83.111227622 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.729218 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.738948 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.740569 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.740630 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.740641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.740657 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.740686 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network 
plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.745961 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 
16:32:07.763694 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.775006 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.791670 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.806116 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc 
kubenswrapper[5002]: I1203 16:32:07.821478 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.836726 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.839367 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.839527 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.839930 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:07 crc kubenswrapper[5002]: E1203 16:32:07.840034 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.843788 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.843831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.843845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.843865 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.843879 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.850547 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.872849 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.888510 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.902868 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.915218 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.926538 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.947098 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 
16:32:07.947140 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.947148 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.947162 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.947172 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:07Z","lastTransitionTime":"2025-12-03T16:32:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.947733 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.959383 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:07 crc kubenswrapper[5002]: I1203 16:32:07.977420 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa
45f0b97b2559e7f82205e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28af698cca7f3b4b94819a440e3040fcc5c20945b00090158115dde07e536518\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:56Z\\\",\\\"message\\\":\\\"4403 6270 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:56.684687 6270 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:56.684789 6270 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:31:56.684873 6270 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:31:56.684910 6270 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:31:56.684911 6270 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 16:31:56.684923 6270 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:31:56.684943 6270 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:31:56.684953 6270 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:56.684965 6270 factory.go:656] Stopping watch factory\\\\nI1203 16:31:56.684981 6270 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:31:56.685008 6270 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:31:56.685017 6270 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:58Z\\\",\\\"message\\\":\\\"\\\\nI1203 16:31:58.465514 6484 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:58.465523 6484 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:31:58.465531 6484 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:31:58.465540 6484 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:31:58.465679 6484 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.465824 6484 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.465885 6484 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:58.466116 6484 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.467512 6484 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.467811 6484 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:07Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.049814 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.049874 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.049882 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.049898 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.049909 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.152009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.152058 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.152069 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.152084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.152095 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.254954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.255005 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.255016 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.255036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.255047 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.357646 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.357700 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.357710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.357727 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.357740 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.461344 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.461416 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.461436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.461462 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.461480 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.564573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.564656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.564674 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.564711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.564730 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.667568 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.667775 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.667796 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.667817 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.667831 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.770405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.770452 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.770465 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.770483 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.770495 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.839935 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.840089 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:08 crc kubenswrapper[5002]: E1203 16:32:08.840242 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:08 crc kubenswrapper[5002]: E1203 16:32:08.840472 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.874171 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.874239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.874251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.874271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.874283 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.977813 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.977865 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.977876 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.977896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:08 crc kubenswrapper[5002]: I1203 16:32:08.977909 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:08Z","lastTransitionTime":"2025-12-03T16:32:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.080978 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.081072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.081098 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.081133 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.081163 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.184015 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.184072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.184082 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.184097 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.184109 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.286869 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.286927 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.286940 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.286963 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.286975 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.390133 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.390181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.390190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.390205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.390217 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.493425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.493483 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.493495 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.493514 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.493526 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.595384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.596101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.596210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.596340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.596446 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.612340 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:09Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.617481 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.617531 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.617549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.617570 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.617586 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.638540 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:09Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.643828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.643897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.643912 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.643937 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.643952 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.658841 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:09Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.663250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.663294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.663306 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.663324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.663336 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.676262 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:09Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.681002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.681062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.681074 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.681092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.681102 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.696317 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:09Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.696446 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.698135 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
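All five patch attempts above fail identically: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter of 2025-08-24T17:21:41Z lies months before the node clock of 2025-12-03T16:32:09Z, so the kubelet exhausts its retries and logs "update node status exceeds retry count". The error text is produced by Go's standard certificate verification. The sketch below reproduces the same validity-window check; it is illustrative only, the certificate path is a placeholder, and the real check happens inside the TLS handshake rather than against a file.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Placeholder path; the log does not say where this webhook's cert lives.
	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	// Same window test crypto/x509 applies during verification; when it fails,
	// clients see "x509: certificate has expired or is not yet valid".
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
		return
	}
	fmt.Println("certificate is within its validity window")
}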
event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.698177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.698187 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.698208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.698218 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.800327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.800368 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.800378 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.800399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.800412 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.840267 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.840402 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.840438 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:09 crc kubenswrapper[5002]: E1203 16:32:09.840633 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.903143 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.903185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.903196 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.903210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:09 crc kubenswrapper[5002]: I1203 16:32:09.903219 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:09Z","lastTransitionTime":"2025-12-03T16:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.005718 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.005869 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.005884 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.005900 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.005912 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.109082 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.109125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.109136 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.109154 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.109167 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.212006 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.212053 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.212062 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.212077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.212088 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.315033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.315079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.315089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.315107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.315121 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.417561 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.417599 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.417642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.417660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.417673 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.520527 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.520576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.520584 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.520604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.520614 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.622564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.622614 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.622622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.622636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.622646 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.724761 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.724809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.724828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.724847 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.724862 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.827529 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.827591 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.827606 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.827628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.827642 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.839820 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:10 crc kubenswrapper[5002]: E1203 16:32:10.839941 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.839990 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:10 crc kubenswrapper[5002]: E1203 16:32:10.840132 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.930398 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.930473 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.930487 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.930505 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:10 crc kubenswrapper[5002]: I1203 16:32:10.930518 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:10Z","lastTransitionTime":"2025-12-03T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.033455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.033523 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.033536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.033555 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.033570 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.136095 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.136172 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.136195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.136222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.136239 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.238904 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.238990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.239012 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.239042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.239063 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.341088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.341161 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.341179 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.341198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.341211 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.382489 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.383787 5002 scope.go:117] "RemoveContainer" containerID="9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.403721 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.416965 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.436275 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa
45f0b97b2559e7f82205e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:58Z\\\",\\\"message\\\":\\\"\\\\nI1203 16:31:58.465514 6484 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:58.465523 6484 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:31:58.465531 6484 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:31:58.465540 6484 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:31:58.465679 6484 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.465824 6484 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.465885 6484 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:58.466116 6484 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.467512 6484 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.467811 6484 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.443356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.443396 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.443407 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.443423 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.443435 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.451012 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.462086 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.480980 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.496580 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.510226 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.522531 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-
o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 
16:32:11.542367 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.546166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.546204 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.546266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.546282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.546292 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.557324 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.568934 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.582108 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.593903 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.607117 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.615428 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.625432 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:11Z is after 2025-08-24T17:21:41Z" 
Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.648469 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.648508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.648517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.648534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.648545 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.751210 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.751413 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.751426 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.751444 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.751458 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.839688 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.839734 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:11 crc kubenswrapper[5002]: E1203 16:32:11.839996 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:11 crc kubenswrapper[5002]: E1203 16:32:11.840063 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.853989 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.854060 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.854084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.854113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.854135 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.957233 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.957280 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.957298 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.957322 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:11 crc kubenswrapper[5002]: I1203 16:32:11.957337 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:11Z","lastTransitionTime":"2025-12-03T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.060206 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.060248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.060261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.060278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.060290 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.162405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.162445 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.162453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.162466 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.162477 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.246267 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/1.log" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.249409 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.249915 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.265534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.265592 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.265604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.265622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.265635 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.269685 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.290386 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.314015 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b
488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:58Z\\\",\\\"message\\\":\\\"\\\\nI1203 16:31:58.465514 6484 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:58.465523 6484 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:31:58.465531 6484 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:31:58.465540 6484 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:31:58.465679 6484 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.465824 6484 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.465885 6484 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:58.466116 6484 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.467512 6484 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.467811 6484 reflector.go:311] Stopping reflector *v1.Namespace (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.327321 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.340622 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.356244 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.368059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.368094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.368104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.368118 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.368129 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.380200 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.394000 5002 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.414365 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.432152 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.446386 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.459202 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.471425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.471478 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.471492 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.471513 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.471528 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.473790 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.490186 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.508086 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.524179 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.538345 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-08-24T17:21:41Z" 
Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.573691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.573796 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.573816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.573848 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.573870 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.676609 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.676649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.676661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.676679 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.676693 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.779699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.779802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.779824 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.779848 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.779905 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.840427 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.840524 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:12 crc kubenswrapper[5002]: E1203 16:32:12.840583 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:12 crc kubenswrapper[5002]: E1203 16:32:12.840728 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.883228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.883476 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.883573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.883672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.883835 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.986388 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.986440 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.986455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.986477 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:12 crc kubenswrapper[5002]: I1203 16:32:12.986494 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:12Z","lastTransitionTime":"2025-12-03T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.089354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.089405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.089418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.089437 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.089451 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.152356 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:13 crc kubenswrapper[5002]: E1203 16:32:13.152569 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:32:13 crc kubenswrapper[5002]: E1203 16:32:13.152646 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:32:29.152624958 +0000 UTC m=+72.566446846 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.192367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.192410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.192418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.192432 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.192441 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.255604 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/2.log" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.256236 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/1.log" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.259547 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2" exitCode=1 Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.259613 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.259674 5002 scope.go:117] "RemoveContainer" containerID="9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.260729 5002 scope.go:117] "RemoveContainer" containerID="0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2" Dec 03 16:32:13 crc kubenswrapper[5002]: E1203 16:32:13.260987 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.281757 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f28941f4a1e69da0a5076899b8b419e2dbbdcaa45f0b97b2559e7f82205e2fd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:31:58Z\\\",\\\"message\\\":\\\"\\\\nI1203 16:31:58.465514 6484 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:31:58.465523 6484 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:31:58.465531 6484 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:31:58.465540 6484 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:31:58.465679 6484 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.465824 6484 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.465885 6484 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 16:31:58.466116 6484 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 16:31:58.467512 6484 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:31:58.467811 6484 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event 
on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.295134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.295363 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.295435 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.295511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.295575 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.295982 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.310635 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.326465 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.345838 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192
.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\
",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.367389 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.382189 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.396676 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.398627 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.398691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.398711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.398735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.398785 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.409322 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.422504 5002 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSt
atuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.442408 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.457482 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.469900 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.487503 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 
16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.501699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.501836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.501862 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.501898 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.501923 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.502359 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.516143 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.530872 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.604630 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.604694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.604708 5002 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.604731 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.604772 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.708010 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.708042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.708051 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.708064 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.708074 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.810762 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.810811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.810820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.810836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.810847 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.840339 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.840400 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:13 crc kubenswrapper[5002]: E1203 16:32:13.840538 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:13 crc kubenswrapper[5002]: E1203 16:32:13.840632 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.913001 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.913077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.913094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.913123 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:13 crc kubenswrapper[5002]: I1203 16:32:13.913143 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:13Z","lastTransitionTime":"2025-12-03T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.016970 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.017028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.017040 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.017059 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.017075 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.120122 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.120195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.120209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.120230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.120245 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.223432 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.223486 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.223498 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.223518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.223531 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.265866 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/2.log" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.269969 5002 scope.go:117] "RemoveContainer" containerID="0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2" Dec 03 16:32:14 crc kubenswrapper[5002]: E1203 16:32:14.270125 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.291411 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.309542 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.326272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.326323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.326334 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.326355 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.326369 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.340882 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.355377 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.368617 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.386992 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.407502 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\"
:\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0
c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running
\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.420459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.429141 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.429195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.429207 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.429228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.429240 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.437695 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.454417 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.465640 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.475227 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.486704 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.498411 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.513259 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.524276 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.531795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 
16:32:14.531856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.531870 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.531893 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.531908 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.534404 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster
-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.636336 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.636498 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.636526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.636564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.636593 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.739773 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.739825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.739842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.739861 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.739871 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.839818 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.839821 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:14 crc kubenswrapper[5002]: E1203 16:32:14.840065 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:32:14 crc kubenswrapper[5002]: E1203 16:32:14.840150 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.843423 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.843491 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.843501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.843517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.843550 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.946465 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.946539 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.946549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.946567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:14 crc kubenswrapper[5002]: I1203 16:32:14.946577 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:14Z","lastTransitionTime":"2025-12-03T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.050691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.050816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.050845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.050878 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.050896 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.153407 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.153480 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.153496 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.153531 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.153558 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.257360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.257642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.257777 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.257855 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.257919 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.360767 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.360821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.360836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.360856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.360871 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.463942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.463990 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.464003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.464020 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.464033 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.566516 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.566571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.566587 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.566607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.566622 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.668711 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.668771 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.668780 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.668796 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.668807 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.771841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.771884 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.771897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.771915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.771924 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.839889 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.839963 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:15 crc kubenswrapper[5002]: E1203 16:32:15.840085 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:32:15 crc kubenswrapper[5002]: E1203 16:32:15.840251 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.873941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.873984 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.873994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.874012 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.874021 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.977540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.977634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.977659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.977696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:15 crc kubenswrapper[5002]: I1203 16:32:15.977724 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:15Z","lastTransitionTime":"2025-12-03T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.080603 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.080689 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.080734 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.080822 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.080847 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.183880 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.183914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.183925 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.183940 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.183950 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.286573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.286629 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.286639 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.286661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.286683 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.389638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.389700 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.389712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.389735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.389773 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.492071 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.492125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.492138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.492153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.492164 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.595587 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.595649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.595657 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.595678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.595690 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.699151 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.699230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.699244 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.699267 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.699282 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.802134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.802191 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.802208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.802232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.802249 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.840425 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.840507 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:16 crc kubenswrapper[5002]: E1203 16:32:16.840605 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:16 crc kubenswrapper[5002]: E1203 16:32:16.840768 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.854406 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.869451 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.889986 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.903283 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.905379 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.905419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.905429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.905454 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.905466 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:16Z","lastTransitionTime":"2025-12-03T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.916663 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.935256 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.951120 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.966223 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.978239 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:16 crc kubenswrapper[5002]: I1203 16:32:16.992107 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:16Z is after 2025-08-24T17:21:41Z" 
Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.007365 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.008924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.008959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.008972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.008991 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.009005 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.020053 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.040887 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 
2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.054505 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.070064 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.087247 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.108913 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\"
:\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0
c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running
\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:17Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.111036 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.111088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.111098 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.111115 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.111125 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.214913 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.214980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.215000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.215028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.215049 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.319491 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.319563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.319576 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.319593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.319606 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.422321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.422377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.422389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.422407 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.422696 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.526181 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.526307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.526320 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.526344 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.526360 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.630513 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.630597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.630614 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.630643 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.630664 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.737689 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.737772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.737790 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.737810 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.737824 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.839275 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:17 crc kubenswrapper[5002]: E1203 16:32:17.839399 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.839815 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:17 crc kubenswrapper[5002]: E1203 16:32:17.840055 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.841128 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.841158 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.841168 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.841185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.841197 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.944193 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.944277 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.944293 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.944316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:17 crc kubenswrapper[5002]: I1203 16:32:17.944328 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:17Z","lastTransitionTime":"2025-12-03T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.047834 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.047880 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.047891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.047912 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.047922 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.150618 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.150651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.150659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.150674 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.150683 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.253475 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.253741 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.253843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.253926 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.253990 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.356368 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.356404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.356412 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.356425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.356436 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.459769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.459813 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.459823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.459839 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.459850 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.563321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.563399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.563421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.563451 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.563473 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.666790 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.666837 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.666851 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.666872 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.666886 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.769179 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.769220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.769230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.769247 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.769259 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.839871 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:18 crc kubenswrapper[5002]: E1203 16:32:18.840014 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.840241 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:18 crc kubenswrapper[5002]: E1203 16:32:18.840303 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.871241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.871555 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.871640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.871739 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.871844 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.975125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.975175 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.975188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.975261 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:18 crc kubenswrapper[5002]: I1203 16:32:18.975277 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:18Z","lastTransitionTime":"2025-12-03T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.078845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.078911 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.078922 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.078942 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.078953 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.181642 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.181721 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.181732 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.181766 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.181780 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.284116 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.284213 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.284241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.284313 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.284337 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.386945 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.387007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.387027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.387057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.387077 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.489307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.489344 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.489353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.489366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.489377 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.592685 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.592862 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.592880 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.592899 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.592913 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.695974 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.696035 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.696053 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.696077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.696094 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.798186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.798254 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.798272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.798297 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.798317 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.840365 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.840436 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.840779 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.840909 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.855779 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.897254 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.897303 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.897316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.897337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.897351 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.914363 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:19Z is after 2025-08-24T17:21:41Z"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.918571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.918605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.918617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.918632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.918642 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.936104 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:19Z is after 2025-08-24T17:21:41Z"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.941365 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.941415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.941429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.941449 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.941463 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.956759 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:19Z is after 2025-08-24T17:21:41Z"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.960563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.960667 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.960712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.960772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.960790 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.978822 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:19Z is after 2025-08-24T17:21:41Z"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.982414 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.982442 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.982454 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.982497 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.982511 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.996638 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:19Z is after 2025-08-24T17:21:41Z"
Dec 03 16:32:19 crc kubenswrapper[5002]: E1203 16:32:19.996776 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.998268 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.998299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.998310 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.998325 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:19 crc kubenswrapper[5002]: I1203 16:32:19.998338 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:19Z","lastTransitionTime":"2025-12-03T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.100892 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.100941 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.100950 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.100966 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.100974 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.203119 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.203163 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.203174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.203190 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.203201 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.305589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.305638 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.305649 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.305665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.305677 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.409175 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.409222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.409233 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.409250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.409260 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.512166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.512204 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.512214 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.512232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.512245 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.615254 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.615300 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.615316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.615339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.615355 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.718003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.718066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.718081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.718100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.718114 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.820792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.820847 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.820862 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.820883 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.820901 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.840038 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:20 crc kubenswrapper[5002]: E1203 16:32:20.840184 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.840042 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:20 crc kubenswrapper[5002]: E1203 16:32:20.840383 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.923293 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.923336 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.923347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.923366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:20 crc kubenswrapper[5002]: I1203 16:32:20.923377 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:20Z","lastTransitionTime":"2025-12-03T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.026354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.026389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.026399 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.026414 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.026424 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.129203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.129259 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.129274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.129294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.129307 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.233138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.233194 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.233205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.233228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.233248 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.336011 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.336055 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.336065 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.336081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.336093 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.438676 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.438774 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.438795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.438820 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.438836 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.542726 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.542828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.542844 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.542867 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.542880 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.645778 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.645822 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.645829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.645849 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.645864 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.749167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.749230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.749247 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.749271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.749284 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.840234 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.840356 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:21 crc kubenswrapper[5002]: E1203 16:32:21.840465 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:32:21 crc kubenswrapper[5002]: E1203 16:32:21.840600 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.853087 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.853148 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.853166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.853200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.853219 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.955559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.955640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.955656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.955673 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:21 crc kubenswrapper[5002]: I1203 16:32:21.955686 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:21Z","lastTransitionTime":"2025-12-03T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.059022 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.059076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.059088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.059110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.059123 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.162006 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.162050 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.162060 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.162077 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.162088 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.265834 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.265900 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.265936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.265964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.265980 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.369868 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.369923 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.369936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.369959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.369972 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.473025 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.473455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.473533 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.473625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.473727 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.581640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.581716 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.581729 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.582280 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.582422 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.687389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.687476 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.687488 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.687507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.687516 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.790013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.790108 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.790126 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.790152 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.790171 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.839849 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.839920 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:22 crc kubenswrapper[5002]: E1203 16:32:22.840061 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:22 crc kubenswrapper[5002]: E1203 16:32:22.840274 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.893070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.893146 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.893161 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.893184 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.893199 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.996162 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.996239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.996251 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.996285 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:22 crc kubenswrapper[5002]: I1203 16:32:22.996301 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:22Z","lastTransitionTime":"2025-12-03T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.098986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.099049 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.099066 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.099089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.099103 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.201962 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.202026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.202037 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.202075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.202086 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.305001 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.305064 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.305075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.305097 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.305108 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.408246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.408288 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.408297 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.408313 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.408323 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.511643 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.511709 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.511719 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.511737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.511765 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.614802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.614871 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.614883 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.614902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.614918 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.717679 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.717733 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.717784 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.717804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.717816 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.821177 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.821235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.821247 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.821268 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.821283 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.839304 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.839306 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:23 crc kubenswrapper[5002]: E1203 16:32:23.839448 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:23 crc kubenswrapper[5002]: E1203 16:32:23.839550 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.924227 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.924260 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.924270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.924282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:23 crc kubenswrapper[5002]: I1203 16:32:23.924292 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:23Z","lastTransitionTime":"2025-12-03T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.028622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.028914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.029032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.029138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.029227 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.132415 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.132476 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.132485 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.132507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.132521 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.236270 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.236323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.236333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.236352 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.236364 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.338737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.338825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.338837 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.338874 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.338887 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.442681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.442739 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.442769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.442793 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.442807 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.546006 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.546082 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.546102 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.546121 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.546133 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.649509 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.649567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.649582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.649604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.649626 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.753254 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.753313 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.753324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.753348 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.753361 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.840269 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.840344 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:24 crc kubenswrapper[5002]: E1203 16:32:24.840451 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:24 crc kubenswrapper[5002]: E1203 16:32:24.840532 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.856292 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.856343 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.856354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.856374 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.856388 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.960022 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.960095 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.960110 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.960155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:24 crc kubenswrapper[5002]: I1203 16:32:24.960173 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:24Z","lastTransitionTime":"2025-12-03T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.063000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.063047 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.063057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.063075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.063085 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.165924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.165978 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.165997 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.166027 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.166041 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.268905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.268947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.268956 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.268972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.268984 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.371582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.371623 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.371633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.371654 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.371665 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.474519 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.474554 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.474564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.474582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.474594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.580104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.580180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.580203 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.580241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.580286 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.682801 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.682855 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.682867 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.682886 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.682901 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.785512 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.785592 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.785611 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.785641 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.785661 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.839736 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.839906 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:25 crc kubenswrapper[5002]: E1203 16:32:25.840018 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:25 crc kubenswrapper[5002]: E1203 16:32:25.840107 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.888614 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.888678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.888689 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.888710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.888723 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.992003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.992055 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.992067 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.992087 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:25 crc kubenswrapper[5002]: I1203 16:32:25.992098 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:25Z","lastTransitionTime":"2025-12-03T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.095153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.095207 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.095219 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.095241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.095254 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.197636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.197684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.197694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.197710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.197719 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.299575 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.299612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.299622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.299636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.299644 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.402279 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.402347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.402360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.402384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.402400 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.505774 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.505821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.505831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.505849 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.505862 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.608404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.608438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.608446 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.608460 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.608469 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.710969 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.711016 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.711033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.711055 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.711074 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.813759 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.813825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.813836 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.813852 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.813862 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.840074 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.840156 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:26 crc kubenswrapper[5002]: E1203 16:32:26.840273 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:26 crc kubenswrapper[5002]: E1203 16:32:26.840391 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.841650 5002 scope.go:117] "RemoveContainer" containerID="0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2" Dec 03 16:32:26 crc kubenswrapper[5002]: E1203 16:32:26.841961 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.854446 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.867379 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.887730 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.903086 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\"
:\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0
c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running
\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.916199 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.916347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.916705 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.916823 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.916908 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:26Z","lastTransitionTime":"2025-12-03T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.919703 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.932991 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.943406 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.956035 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.968266 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.982729 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:26 crc kubenswrapper[5002]: I1203 16:32:26.997678 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.013619 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.019802 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 
16:32:27.019944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.020030 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.020134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.020228 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.033877 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster
-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.051074 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.066621 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.080957 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.099487 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.108562 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562
f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:27Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.122534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.122594 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.122607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.122624 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.122658 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.225003 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.225310 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.225411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.225498 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.225574 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.328439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.328501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.328520 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.328543 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.328562 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.431625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.431716 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.431742 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.431821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.431846 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.534501 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.534554 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.534565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.534582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.534594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.638100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.638156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.638169 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.638191 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.638207 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.741076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.741167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.741183 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.741208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.741225 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.839361 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.839437 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:27 crc kubenswrapper[5002]: E1203 16:32:27.839508 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:27 crc kubenswrapper[5002]: E1203 16:32:27.839584 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.844307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.844342 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.844351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.844366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.844378 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.946678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.946717 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.946728 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.946775 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:27 crc kubenswrapper[5002]: I1203 16:32:27.946785 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:27Z","lastTransitionTime":"2025-12-03T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.049680 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.049729 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.049738 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.049782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.049804 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.152611 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.152664 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.152672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.152691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.152702 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.255939 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.255976 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.255985 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.256000 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.256011 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.358696 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.358735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.358789 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.358804 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.358812 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.462021 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.462090 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.462106 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.462130 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.462145 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.564740 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.564842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.564866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.564894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.564914 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.667797 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.667831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.667841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.667856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.667869 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.770464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.770521 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.770531 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.770552 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.770565 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.839799 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.839884 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:28 crc kubenswrapper[5002]: E1203 16:32:28.839992 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:28 crc kubenswrapper[5002]: E1203 16:32:28.840103 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.874035 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.874099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.874121 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.874153 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.874172 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.978117 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.978188 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.978212 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.978239 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:28 crc kubenswrapper[5002]: I1203 16:32:28.978259 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:28Z","lastTransitionTime":"2025-12-03T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.081246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.081347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.081381 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.081421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.081450 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.183846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.183916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.183934 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.183959 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.183977 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.237718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:29 crc kubenswrapper[5002]: E1203 16:32:29.237957 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:32:29 crc kubenswrapper[5002]: E1203 16:32:29.238097 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:01.238060943 +0000 UTC m=+104.651882871 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.286273 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.286333 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.286350 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.286378 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.286395 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.389900 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.389972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.389992 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.390019 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.390042 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.492938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.493012 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.493030 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.493057 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.493076 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.596029 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.596084 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.596102 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.596128 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.596151 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.699389 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.699488 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.699511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.699533 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.699548 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.801724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.801814 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.801828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.801850 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.801865 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.840257 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.840254 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:29 crc kubenswrapper[5002]: E1203 16:32:29.840400 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:29 crc kubenswrapper[5002]: E1203 16:32:29.840541 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.905207 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.905253 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.905265 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.905282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:29 crc kubenswrapper[5002]: I1203 16:32:29.905294 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:29Z","lastTransitionTime":"2025-12-03T16:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.007622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.007707 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.007720 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.007769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.007785 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.110070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.110138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.110156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.110182 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.110199 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.212814 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.212856 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.212866 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.212880 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.212891 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.257769 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.257814 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.257841 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.257860 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.257870 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.272271 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:30Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.277166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.277234 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.277249 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.277272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.277285 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.296056 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:30Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.302412 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.302468 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.302480 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.302502 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.302518 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.316493 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:30Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.323138 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.323200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.323230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.323258 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.323277 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.339242 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:30Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.343289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.343319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.343331 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.343349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.343363 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.360079 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:30Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.360248 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.363312 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.363341 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.363353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.363366 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.363376 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.466056 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.466113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.466125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.466139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.466149 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.568589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.568620 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.568628 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.568643 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.568652 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.671111 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.671167 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.671176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.671189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.671197 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.774645 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.774697 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.774706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.774726 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.774736 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.839622 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.839636 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.839763 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:30 crc kubenswrapper[5002]: E1203 16:32:30.839877 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.878243 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.878278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.878287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.878303 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.878315 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.980180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.980232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.980248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.980268 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:30 crc kubenswrapper[5002]: I1203 16:32:30.980281 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:30Z","lastTransitionTime":"2025-12-03T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.082418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.082456 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.082466 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.082482 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.082491 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.185216 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.185274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.185289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.185312 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.185328 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.288387 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.288428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.288441 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.288458 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.288471 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.391351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.391396 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.391405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.391424 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.391435 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.494329 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.494376 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.494388 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.494405 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.494419 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.596575 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.596610 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.596617 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.596633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.596643 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.699179 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.699230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.699242 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.699260 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.699274 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.802470 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.802528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.802548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.802568 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.802582 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.840168 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.840168 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:31 crc kubenswrapper[5002]: E1203 16:32:31.840917 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:31 crc kubenswrapper[5002]: E1203 16:32:31.842968 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.905517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.905577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.905589 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.905605 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:31 crc kubenswrapper[5002]: I1203 16:32:31.905615 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:31Z","lastTransitionTime":"2025-12-03T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.009484 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.009548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.009567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.009595 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.009614 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.112558 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.112633 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.112655 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.112682 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.112701 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.215733 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.215832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.215845 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.215864 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.215878 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.318364 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.318460 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.318477 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.318508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.318525 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.339086 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/0.log" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.339159 5002 generic.go:334] "Generic (PLEG): container finished" podID="2de485fd-67c0-4be7-abb1-92509ea373da" containerID="cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4" exitCode=1 Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.339201 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerDied","Data":"cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.339715 5002 scope.go:117] "RemoveContainer" containerID="cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.358318 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.376579 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.401579 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b
488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.416176 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562
f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.421829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.421877 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.421889 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.421906 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.421917 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.432719 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.443032 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.497534 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:31Z\\\",\\\"message\\\":\\\"2025-12-03T16:31:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a\\\\n2025-12-03T16:31:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a to /host/opt/cni/bin/\\\\n2025-12-03T16:31:46Z [verbose] multus-daemon started\\\\n2025-12-03T16:31:46Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:32:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.523335 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.524734 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.524811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc 
kubenswrapper[5002]: I1203 16:32:32.524826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.524846 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.524858 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.539470 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.558203 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.570626 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.584707 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.600611 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.614965 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.627223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.627286 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.627299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.627317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.627331 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.628654 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.640560 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.654171 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 
16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.670060 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:32Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.729878 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.729927 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.729938 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.729954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.729964 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.832359 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.832412 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.832423 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.832446 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.832463 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.839688 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:32 crc kubenswrapper[5002]: E1203 16:32:32.839895 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.839695 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:32 crc kubenswrapper[5002]: E1203 16:32:32.840173 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.934622 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.934663 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.934672 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.934694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:32 crc kubenswrapper[5002]: I1203 16:32:32.934706 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:32Z","lastTransitionTime":"2025-12-03T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.038354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.038411 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.038420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.038436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.038447 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.140665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.141369 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.141419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.141449 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.141469 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.244822 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.244897 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.244962 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.244994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.245016 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.345506 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/0.log" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.345565 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerStarted","Data":"46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.347354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.347406 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.347419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.347437 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.347448 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.362157 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.389831 5002 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.414212 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562
f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.436641 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.450829 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.450878 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.450891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.450911 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.450924 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.454933 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.475485 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:31Z\\\",\\\"message\\\":\\\"2025-12-03T16:31:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a\\\\n2025-12-03T16:31:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a to /host/opt/cni/bin/\\\\n2025-12-03T16:31:46Z [verbose] multus-daemon started\\\\n2025-12-03T16:31:46Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:32:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.498883 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.517621 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.538262 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.553380 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.553632 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.553674 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.553688 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.553706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.553723 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.566599 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.586087 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.611379 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.630049 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.642946 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.656425 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.656456 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.656466 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.656481 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.656492 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.658138 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.673061 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.689314 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.760587 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.760661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.760679 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.760706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.760722 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.839530 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.839550 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:33 crc kubenswrapper[5002]: E1203 16:32:33.839826 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:33 crc kubenswrapper[5002]: E1203 16:32:33.839971 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.863925 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.863994 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.864006 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.864034 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.864048 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.967265 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.967337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.967352 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.967375 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:33 crc kubenswrapper[5002]: I1203 16:32:33.967390 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:33Z","lastTransitionTime":"2025-12-03T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.071176 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.071232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.071243 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.071264 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.071274 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.174675 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.174737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.174779 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.174811 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.174831 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.277669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.277743 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.277795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.277826 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.277850 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.381020 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.381074 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.381088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.381109 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.381120 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.484505 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.484551 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.484564 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.484583 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.484597 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.587706 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.587777 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.587791 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.587809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.587834 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.690905 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.690973 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.690986 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.691007 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.691020 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.794876 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.794981 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.795004 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.795042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.795067 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.839928 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.839956 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:34 crc kubenswrapper[5002]: E1203 16:32:34.840162 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:34 crc kubenswrapper[5002]: E1203 16:32:34.840243 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.898163 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.898219 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.898236 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.898257 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:34 crc kubenswrapper[5002]: I1203 16:32:34.898271 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:34Z","lastTransitionTime":"2025-12-03T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.001091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.001134 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.001164 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.001180 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.001197 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.105248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.105307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.105326 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.105350 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.105369 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.208687 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.208737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.208772 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.208795 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.208809 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.312316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.312351 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.312360 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.312377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.312387 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.415604 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.415653 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.415662 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.415680 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.415699 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.519484 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.519869 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.519972 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.520104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.520225 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.622665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.622806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.622827 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.622860 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.622881 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.726868 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.727367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.727433 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.727497 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.727656 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.830246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.830306 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.830315 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.830330 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.830339 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.839812 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.839846 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:35 crc kubenswrapper[5002]: E1203 16:32:35.840028 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:35 crc kubenswrapper[5002]: E1203 16:32:35.840262 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.932695 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.932765 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.932775 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.932794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:35 crc kubenswrapper[5002]: I1203 16:32:35.932807 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:35Z","lastTransitionTime":"2025-12-03T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.036156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.036198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.036208 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.036243 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.036253 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.138464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.138495 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.138505 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.138518 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.138529 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.242013 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.242063 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.242076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.242094 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.242107 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.345093 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.345174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.345185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.345204 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.345217 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.448663 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.448736 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.448771 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.448797 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.448812 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.551902 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.552016 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.552083 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.552113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.552201 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.655044 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.655091 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.655103 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.655123 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.655137 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.757944 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.757998 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.758011 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.758032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.758046 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.839801 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.839840 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:36 crc kubenswrapper[5002]: E1203 16:32:36.839984 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:36 crc kubenswrapper[5002]: E1203 16:32:36.840053 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.857801 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.861373 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.861443 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.861486 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.861523 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.861549 5002 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.878671 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.907951 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799
488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.927349 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.952572 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.964294 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.964356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.964367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.964387 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.964399 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:36Z","lastTransitionTime":"2025-12-03T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.979335 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:36 crc kubenswrapper[5002]: I1203 16:32:36.993459 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562
f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.010589 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.024199 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.043599 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:31Z\\\",\\\"message\\\":\\\"2025-12-03T16:31:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a\\\\n2025-12-03T16:31:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a to /host/opt/cni/bin/\\\\n2025-12-03T16:31:46Z [verbose] multus-daemon started\\\\n2025-12-03T16:31:46Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:32:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.067200 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.067272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.067284 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.067307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.067322 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.067819 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.087228 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.100777 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.117109 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.131672 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.144907 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.159302 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.170421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.170660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc 
kubenswrapper[5002]: I1203 16:32:37.170732 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.170888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.170975 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.177767 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:37Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.274455 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.275349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 
16:32:37.275437 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.275542 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.275621 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.379237 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.379339 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.379365 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.379396 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.379415 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.482166 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.482223 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.482235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.482258 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.482271 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.585468 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.585558 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.585640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.585679 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.585705 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.688892 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.688980 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.689002 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.689031 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.689082 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.793096 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.793145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.793159 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.793185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.793203 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.839433 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.839536 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:37 crc kubenswrapper[5002]: E1203 16:32:37.839642 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:37 crc kubenswrapper[5002]: E1203 16:32:37.839821 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.896837 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.896894 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.896904 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.896936 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.896952 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.999560 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.999612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.999625 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.999647 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:37 crc kubenswrapper[5002]: I1203 16:32:37.999659 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:37Z","lastTransitionTime":"2025-12-03T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.102815 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.102892 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.102919 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.102954 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.102975 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.206362 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.206416 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.206429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.206449 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.206467 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.309464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.309528 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.309546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.309573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.309591 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.412377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.412429 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.412440 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.412464 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.412475 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.515279 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.515324 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.515337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.515356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.515367 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.618699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.618793 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.618810 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.618831 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.618848 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.721699 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.721779 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.721792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.721806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.721816 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.825670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.825731 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.825782 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.825806 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.825824 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.841091 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:38 crc kubenswrapper[5002]: E1203 16:32:38.841310 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.841830 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:38 crc kubenswrapper[5002]: E1203 16:32:38.841959 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.928739 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.928812 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.928824 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.928842 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:38 crc kubenswrapper[5002]: I1203 16:32:38.928855 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:38Z","lastTransitionTime":"2025-12-03T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.032597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.032670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.032694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.032724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.032788 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.135825 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.135874 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.135891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.135914 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.135932 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.238850 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.238924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.238947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.238976 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.239002 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.341671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.341737 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.341792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.341918 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.341939 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.444685 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.444724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.444733 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.444765 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.444776 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.548107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.548156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.548168 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.548186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.548200 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.651472 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.651545 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.651560 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.651609 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.651626 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.657977 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.658146 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.658123311 +0000 UTC m=+147.071945199 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.658232 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.658282 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.658390 5002 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.658405 5002 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.658456 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.658437969 +0000 UTC m=+147.072259857 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.658476 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.65846897 +0000 UTC m=+147.072290858 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.754222 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.754289 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.754299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.754314 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.754327 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.758903 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.759042 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759099 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759122 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759136 5002 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759174 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759194 5002 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.759172708 +0000 UTC m=+147.172994596 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759202 5002 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759219 5002 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.759265 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.75924977 +0000 UTC m=+147.173071678 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.840028 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.840101 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.840175 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:39 crc kubenswrapper[5002]: E1203 16:32:39.840446 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.841311 5002 scope.go:117] "RemoveContainer" containerID="0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.857185 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.857226 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.857242 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.857263 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.857281 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.960440 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.960484 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.960499 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.960516 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:39 crc kubenswrapper[5002]: I1203 16:32:39.960529 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:39Z","lastTransitionTime":"2025-12-03T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.063235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.063274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.063285 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.063299 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.063309 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.166430 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.166521 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.166543 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.166573 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.166592 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.268924 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.268983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.269004 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.269032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.269053 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.372250 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.372287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.372295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.372312 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.372323 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.375606 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/2.log" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.378236 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.378658 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.379050 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.379090 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.379100 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.379123 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.379135 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.393061 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.394888 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.398634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.398671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.398685 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.398703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.398716 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.407700 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.410245 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c
926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.414321 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.414365 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.414377 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.414395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.414408 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.422445 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.426095 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.430278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.430323 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.430344 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.430367 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.430383 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.441918 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.445351 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.448712 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.448792 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.448805 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.448837 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.448853 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.453527 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.461078 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1f887d77-b9a6-4290-9d59-445b9644ebf8\\\",\\\"systemUUID\\\":\\\"c926b5a3-23cd-42f4-be44-84fd294ba72b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.461254 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.463311 5002 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.475070 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.475395 
5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.475988 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.476156 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.476301 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.481909 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.495472 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.518914 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fdacd0d8a142479e4d47667aadc849c5193ee85
55c43185b18666e0b518da8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 
2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"
containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.531730 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.545518 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.569877 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:31Z\\\",\\\"message\\\":\\\"2025-12-03T16:31:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a\\\\n2025-12-03T16:31:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a to /host/opt/cni/bin/\\\\n2025-12-03T16:31:46Z [verbose] multus-daemon started\\\\n2025-12-03T16:31:46Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:32:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.578231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.578264 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.578272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.578286 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.578298 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.584172 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.597643 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.611961 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabd
c9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.625251 5002 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.635205 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.644123 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.681101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.681140 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.681152 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.681172 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.681183 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.784343 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.784592 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.784602 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.784619 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.784631 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.840423 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.840797 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.841050 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:40 crc kubenswrapper[5002]: E1203 16:32:40.841165 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.886985 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.887279 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.887391 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.887493 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.887573 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.990201 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.990232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.990241 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.990255 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:40 crc kubenswrapper[5002]: I1203 16:32:40.990264 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:40Z","lastTransitionTime":"2025-12-03T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.091843 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.091888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.091898 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.091915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.091928 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.194532 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.194595 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.194614 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.194640 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.194660 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.297540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.297608 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.297626 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.297656 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.297673 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.384998 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/3.log" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.386820 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/2.log" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.391008 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" exitCode=1 Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.391081 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.391139 5002 scope.go:117] "RemoveContainer" containerID="0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.392015 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:32:41 crc kubenswrapper[5002]: E1203 16:32:41.392229 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.405839 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.405882 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.405896 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.405916 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.405930 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.417831 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:31Z\\\",\\\"message\\\":\\\"2025-12-03T16:31:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a\\\\n2025-12-03T16:31:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a to /host/opt/cni/bin/\\\\n2025-12-03T16:31:46Z [verbose] multus-daemon started\\\\n2025-12-03T16:31:46Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:32:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.437836 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.453517 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.466571 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.480292 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.497256 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.508009 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.508079 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.508099 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.508129 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.508168 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.517101 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.535296 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de
11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.555152 5002 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.572037 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.589595 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 
16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.608212 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.616889 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.616952 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.617054 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.617101 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.617114 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.632025 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.648842 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.670927 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c7cdeb8b681068b5c8498c131d6e6e092d2207b488989df3181b93020008ec2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:12Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1203 16:32:12.516660 6624 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 16:32:12.516574 6624 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc after 0 failed attempt(s)\\\\nI1203 16:32:12.516390 6624 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-gjxps\\\\nF1203 16:32:12.516657 6624 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:12Z is after 2025-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:41Z\\\",\\\"message\\\":\\\"tring{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-diagnostics/network-check-target\\\\\\\"}\\\\nI1203 16:32:40.930157 7018 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nF1203 16:32:40.930160 7018 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z]\\\\nI1203 16:32:40.930168 7018 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 16:32:40.930172 7018 default_network_controller.go:776] Recording success event on pod opens\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c
00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.685112 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.702365 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.717634 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:41Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.719460 5002 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.719634 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.719774 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.719898 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.720006 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.822232 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.822267 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.822275 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.822291 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.822367 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.840130 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:41 crc kubenswrapper[5002]: E1203 16:32:41.840379 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.840173 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:41 crc kubenswrapper[5002]: E1203 16:32:41.840646 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.924108 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.924382 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.924450 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.924524 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:41 crc kubenswrapper[5002]: I1203 16:32:41.924586 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:41Z","lastTransitionTime":"2025-12-03T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.027620 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.027668 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.027683 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.027704 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.027714 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.130895 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.130949 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.130964 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.130985 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.130999 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.234417 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.234502 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.234519 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.234559 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.234595 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.338340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.338412 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.338438 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.338471 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.338493 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.397877 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/3.log" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.402325 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:32:42 crc kubenswrapper[5002]: E1203 16:32:42.402514 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.422365 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.442439 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.442517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.442545 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.442670 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.442800 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.442770 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7589f91cdac8e63338540df0bc5da9fb3233e73b15e75df6971d89bad7ea6f56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgvqd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bzb7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.469621 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:41Z\\\",\\\"message\\\":\\\"tring{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-diagnostics/network-check-target\\\\\\\"}\\\\nI1203 16:32:40.930157 7018 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nF1203 16:32:40.930160 7018 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:40Z is after 2025-08-24T17:21:41Z]\\\\nI1203 16:32:40.930168 7018 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 16:32:40.930172 7018 default_network_controller.go:776] Recording success event on pod 
opens\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:32:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pxr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4szh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.487517 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"546767f2-efb8-4aa3-82bd-07763628a15c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cc1b69d0fae8296c84934db54ef2ac95c589188d7060be0dd5d0a55429e24a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a49ba3a6c8edfdc4eb41725956b917da478f2a787d3562f4c06f960a677e67e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.507098 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.520034 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6kd7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9009aa9-e6e5-41d5-800d-c3680572f71d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00371c128e8f151606a6e9534ecb6ae222c3df1d1c14277e7c4bce5b28c0152e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfmnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6kd7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.540314 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gjxps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2de485fd-67c0-4be7-abb1-92509ea373da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:32:31Z\\\",\\\"message\\\":\\\"2025-12-03T16:31:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a\\\\n2025-12-03T16:31:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_79712779-2aec-42b5-a526-4bf63ddc226a to /host/opt/cni/bin/\\\\n2025-12-03T16:31:46Z [verbose] multus-daemon started\\\\n2025-12-03T16:31:46Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:32:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qshft\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gjxps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.546371 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.546428 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.546447 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.546476 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.546496 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.563468 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b16d350b-678e-4558-ac4c-634fcdb2d2f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c87cdd17853ad5ddc0990bb7ddac4e37d5d29521cec3ccad390a412c2ab5ca11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://488a7dba1117b0f415adfe614697091f0c45fdade9f38d690291f5841c943675\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fde9e6e73c2266a01280e1b54445c2ed849d9287b716ea7adea398dfe1911151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62053649a34e2735e14948de8a4912d256f4a0f3b11022003c102c8ac2a94394\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e91b5de3c589429797f8378a81cd741595f4e35c4ce387fa79ab18a3a18adaf0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ca66f9b1880231d1931e51684434ab6afc6cd858d494b90e992622f81e80e10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7b162408347256344b33f0be8d4cbc8a56627636027747a0f7d8d9412414e72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k865f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-8v6vc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.586238 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 16:31:29.514186 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 16:31:29.515343 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3371459023/tls.crt::/tmp/serving-cert-3371459023/tls.key\\\\\\\"\\\\nI1203 16:31:35.309411 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 16:31:35.311731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 16:31:35.311784 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 16:31:35.311810 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 16:31:35.311816 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 16:31:35.316257 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1203 16:31:35.316311 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1203 16:31:35.316328 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316335 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 16:31:35.316340 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 16:31:35.316343 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 16:31:35.316347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 16:31:35.316350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1203 16:31:35.320067 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.602804 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a26d989714c4f7dc45bf162419a4070f04d4cf1f8544fc8952374657a9628b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.618524 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://809c34216af8ff6dcb323756201ffd2a84936d071ce04f1525dcc9e64e2e79e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.631875 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24141739-e7a8-40cf-ab9e-267ee876230b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-96xrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c7qvw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.649724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.649794 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.649807 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.649830 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.649846 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.651048 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62a3c6cd-0975-4166-933c-26ed4f9eae7a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13c528a84e90d3d8fa2cc59cbeb66a1d059c7a1c23ef522222c14569224e8010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7de69c7000b96467d9d8ce442e2bcc904e08301e7c9d80314ceaeb185992e9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://002a68160f59bace6d3a89b29133673cccffe637029832ad6f24be4fc935d63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e3dad1bcd412746cdf996edf60a705162d07b46b61e9a37de4b9bf491e0156e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:31:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.673532 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.699077 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355ebd53089858d623745a0ef16a8e53e1807436093b811938b6d2c6e5ce6384\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://694c6602525e28e0dfff9fbb2dccdf90e61263c593d276b801055b2ca2dc6a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.714623 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-ldz4j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"21cef49c-bd93-40d7-a2f7-0c8338455ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3c665831f792880140c8770c4a6400fc25badb854fa5295086cdee36779a3fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-48mk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-ldz4j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.728397 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f47d5289-b50f-4012-bc4d-2aa9df7864c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6f2d19cd57311732442fb494575cffdefeb6220320039412499a3309adb65b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e10cdcbe921d6be5b108c26945fcfeb6380d911c59dd3d55eae681e2c132d2f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88zbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dvd6r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z" 
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.741611 5002 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f414bfe-1b0f-4c34-bb3a-5fa5af8b18db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:31:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c52c2e645d329326348aaa5f8e7c0f86d5ca95dbac5d3d25eff60c601ae4296f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67d28a28c490033e510797564f20adb11406e8a143186700cb01628db58c4e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc2ad9afe0cb2b69d89498aa474418ac5135a45fe0393d00839e67b137e4a2fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:31:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:31:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:32:42Z is after 2025-08-24T17:21:41Z"
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.753146 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.753205 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.753220 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.753248 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.753265 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.840396 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:42 crc kubenswrapper[5002]: E1203 16:32:42.840606 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.840894 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:42 crc kubenswrapper[5002]: E1203 16:32:42.841173 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.856192 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.856678 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.856812 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.856933 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.857033 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.966546 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.967026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.967127 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.967228 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:42 crc kubenswrapper[5002]: I1203 16:32:42.967357 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:42Z","lastTransitionTime":"2025-12-03T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.070454 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.070507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.070520 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.070540 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.070555 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.174728 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.174832 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.174852 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.174881 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.174903 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.278436 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.278505 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.278524 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.278550 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.278568 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.382192 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.382266 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.382286 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.382316 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.382334 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.486229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.486327 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.486386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.486421 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.486447 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.589596 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.589635 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.589643 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.589660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.589671 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.694024 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.694125 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.694178 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.694214 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.694232 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.797279 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.797340 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.797354 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.797373 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.797386 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.839794 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:43 crc kubenswrapper[5002]: E1203 16:32:43.840010 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.840108 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:43 crc kubenswrapper[5002]: E1203 16:32:43.840348 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.900508 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.900555 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.900567 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.900583 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:43 crc kubenswrapper[5002]: I1203 16:32:43.900594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:43Z","lastTransitionTime":"2025-12-03T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.004011 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.004092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.004111 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.004139 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.004156 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.107957 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.108032 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.108046 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.108075 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.108096 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.211419 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.211496 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.211530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.211562 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.211584 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.314659 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.314710 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.314722 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.314742 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.314770 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.417453 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.417545 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.417565 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.417593 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.417611 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.521442 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.521511 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.521530 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.521562 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.521581 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.624160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.624218 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.624235 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.624263 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.624281 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.727271 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.727319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.727330 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.727347 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.727359 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.831089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.831155 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.831165 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.831182 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.831200 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.839816 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.839837 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:44 crc kubenswrapper[5002]: E1203 16:32:44.839946 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:44 crc kubenswrapper[5002]: E1203 16:32:44.840058 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.935024 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.935151 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.935168 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.935192 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:44 crc kubenswrapper[5002]: I1203 16:32:44.935230 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:44Z","lastTransitionTime":"2025-12-03T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.038475 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.038577 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.038601 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.038647 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.038671 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.141819 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.141917 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.141928 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.141963 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.141974 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.245582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.245636 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.245648 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.245669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.245682 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.349462 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.349524 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.349535 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.349571 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.349583 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.453276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.453328 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.453337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.453353 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.453364 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.556724 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.556796 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.556809 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.556828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.556840 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.660470 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.660556 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.660579 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.660607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.660625 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.764308 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.764383 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.764401 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.764431 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.764450 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.839606 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.839782 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:45 crc kubenswrapper[5002]: E1203 16:32:45.840110 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:45 crc kubenswrapper[5002]: E1203 16:32:45.840269 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.867646 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.867681 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.867694 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.867714 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.867727 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.970668 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.970735 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.970784 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.970816 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:45 crc kubenswrapper[5002]: I1203 16:32:45.970835 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:45Z","lastTransitionTime":"2025-12-03T16:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.074420 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.074507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.074539 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.074572 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.074594 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.177278 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.177320 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.177332 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.177349 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.177358 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.280497 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.280568 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.280582 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.280602 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.280620 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.384014 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.384072 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.384086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.384112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.384127 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.487272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.487319 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.487337 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.487356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.487368 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.591229 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.591282 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.591295 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.591315 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.591328 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.693932 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.694004 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.694022 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.694051 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.694070 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.798015 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.798092 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.798113 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.798145 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.798165 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.840547 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.840596 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:46 crc kubenswrapper[5002]: E1203 16:32:46.840881 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:46 crc kubenswrapper[5002]: E1203 16:32:46.841047 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.873106 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=39.873074751 podStartE2EDuration="39.873074751s" podCreationTimestamp="2025-12-03 16:32:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:46.872604799 +0000 UTC m=+90.286426707" watchObservedRunningTime="2025-12-03 16:32:46.873074751 +0000 UTC m=+90.286896659" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.901144 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.901191 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.901231 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.901252 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.901266 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:46Z","lastTransitionTime":"2025-12-03T16:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.921918 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=71.92189364 podStartE2EDuration="1m11.92189364s" podCreationTimestamp="2025-12-03 16:31:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:46.896469289 +0000 UTC m=+90.310291207" watchObservedRunningTime="2025-12-03 16:32:46.92189364 +0000 UTC m=+90.335715548" Dec 03 16:32:46 crc kubenswrapper[5002]: I1203 16:32:46.979730 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=67.979698292 podStartE2EDuration="1m7.979698292s" podCreationTimestamp="2025-12-03 16:31:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:46.97963359 +0000 UTC m=+90.393455478" watchObservedRunningTime="2025-12-03 16:32:46.979698292 +0000 UTC m=+90.393520210" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.007356 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.007404 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.007418 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.007444 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.007457 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.054839 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-ldz4j" podStartSLOduration=66.054804135 podStartE2EDuration="1m6.054804135s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.037451484 +0000 UTC m=+90.451273392" watchObservedRunningTime="2025-12-03 16:32:47.054804135 +0000 UTC m=+90.468626023" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.068135 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=28.068107781 podStartE2EDuration="28.068107781s" podCreationTimestamp="2025-12-03 16:32:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.06807713 +0000 UTC m=+90.481899028" watchObservedRunningTime="2025-12-03 16:32:47.068107781 +0000 UTC m=+90.481929679" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.068374 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dvd6r" podStartSLOduration=65.068366358 podStartE2EDuration="1m5.068366358s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.054478887 +0000 UTC m=+90.468300785" watchObservedRunningTime="2025-12-03 16:32:47.068366358 +0000 UTC m=+90.482188256" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.097907 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podStartSLOduration=65.097885565 podStartE2EDuration="1m5.097885565s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.097718401 +0000 UTC m=+90.511540299" watchObservedRunningTime="2025-12-03 16:32:47.097885565 +0000 UTC m=+90.511707453" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.110230 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.110274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.110283 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.110298 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.110307 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.179061 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6kd7v" podStartSLOduration=66.179036645 podStartE2EDuration="1m6.179036645s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.162835253 +0000 UTC m=+90.576657161" watchObservedRunningTime="2025-12-03 16:32:47.179036645 +0000 UTC m=+90.592858533" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.179321 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gjxps" podStartSLOduration=65.179314092 podStartE2EDuration="1m5.179314092s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.178595453 +0000 UTC m=+90.592417371" watchObservedRunningTime="2025-12-03 16:32:47.179314092 +0000 UTC m=+90.593135980" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.200125 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-8v6vc" podStartSLOduration=65.200102292 podStartE2EDuration="1m5.200102292s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:47.198640294 +0000 UTC m=+90.612462192" watchObservedRunningTime="2025-12-03 16:32:47.200102292 +0000 UTC m=+90.613924180" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.212661 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.212720 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.212730 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.212821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.212833 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.315601 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.315654 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.315669 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.315691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.315702 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.419081 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.419174 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.419189 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.419215 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.419235 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.522026 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.522076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.522089 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.522107 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.522120 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.626131 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.626198 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.626209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.626246 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.626270 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.729479 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.729526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.729534 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.729549 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.729562 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.834430 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.834526 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.834545 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.834572 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.834592 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.839627 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:47 crc kubenswrapper[5002]: E1203 16:32:47.839811 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.839894 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:47 crc kubenswrapper[5002]: E1203 16:32:47.839963 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.943520 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.944186 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.944274 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.944397 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:47 crc kubenswrapper[5002]: I1203 16:32:47.944491 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:47Z","lastTransitionTime":"2025-12-03T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.049028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.049076 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.049090 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.049111 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.049129 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.151507 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.151544 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.151553 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.151612 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.151621 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.254889 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.254930 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.254939 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.254955 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.254967 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.358482 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.358536 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.358548 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.358570 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.358585 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.462021 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.462086 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.462104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.462150 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.462168 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.565597 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.565660 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.565676 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.565703 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.565722 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.669221 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.669276 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.669287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.669306 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.669316 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.772578 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.772650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.772666 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.772691 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.772706 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.839632 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:32:48 crc kubenswrapper[5002]: E1203 16:32:48.839886 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.840073 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw"
Dec 03 16:32:48 crc kubenswrapper[5002]: E1203 16:32:48.840244 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.876407 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.876671 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.876891 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.877041 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.877182 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.979607 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.979651 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.979662 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.979679 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:48 crc kubenswrapper[5002]: I1203 16:32:48.979692 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:48Z","lastTransitionTime":"2025-12-03T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.083727 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.083821 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.083838 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.083868 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.083889 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.187410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.187495 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.187517 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.187552 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.187577 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.291018 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.291090 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.291115 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.291148 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.291172 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.396342 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.396384 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.396395 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.396412 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.396424 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.499275 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.499376 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.499400 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.499435 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.499462 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.601943 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.602011 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.602028 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.602053 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.602072 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.705088 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.705195 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.705209 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.705227 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.705242 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.807767 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.807868 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.807888 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.807915 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.807933 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.839794 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.839834 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:32:49 crc kubenswrapper[5002]: E1203 16:32:49.839992 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:49 crc kubenswrapper[5002]: E1203 16:32:49.840132 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.911828 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.911908 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.911947 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.911982 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:49 crc kubenswrapper[5002]: I1203 16:32:49.912008 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:49Z","lastTransitionTime":"2025-12-03T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.015199 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.015272 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.015287 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.015307 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.015323 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.119033 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.119112 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.119131 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.119160 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.119180 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.223410 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.223468 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.223481 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.223503 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.223517 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.326937 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.326983 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.326992 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.327008 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.327018 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.430023 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.430104 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.430114 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.430131 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.430143 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.539317 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.540293 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.540388 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.540650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.540712 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.643951 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.644024 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.644042 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.644069 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.644086 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.714563 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.714650 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.714665 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.714684 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.714696 5002 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:32:50Z","lastTransitionTime":"2025-12-03T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.775564 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k"] Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.776223 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.778118 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.778569 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.779146 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.780569 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.839853 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.839873 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:50 crc kubenswrapper[5002]: E1203 16:32:50.840060 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:50 crc kubenswrapper[5002]: E1203 16:32:50.840190 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.890543 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32293180-236d-484e-a9fa-d2d9cd940535-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.890618 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/32293180-236d-484e-a9fa-d2d9cd940535-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.890654 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32293180-236d-484e-a9fa-d2d9cd940535-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.890704 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32293180-236d-484e-a9fa-d2d9cd940535-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.890853 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/32293180-236d-484e-a9fa-d2d9cd940535-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.992467 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/32293180-236d-484e-a9fa-d2d9cd940535-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.992523 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32293180-236d-484e-a9fa-d2d9cd940535-kube-api-access\") pod 
\"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.992551 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/32293180-236d-484e-a9fa-d2d9cd940535-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.992574 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32293180-236d-484e-a9fa-d2d9cd940535-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.992598 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32293180-236d-484e-a9fa-d2d9cd940535-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.993074 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/32293180-236d-484e-a9fa-d2d9cd940535-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.993150 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/32293180-236d-484e-a9fa-d2d9cd940535-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:50 crc kubenswrapper[5002]: I1203 16:32:50.993847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32293180-236d-484e-a9fa-d2d9cd940535-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.007462 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32293180-236d-484e-a9fa-d2d9cd940535-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.023509 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32293180-236d-484e-a9fa-d2d9cd940535-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h9n6k\" (UID: \"32293180-236d-484e-a9fa-d2d9cd940535\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.100924 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" Dec 03 16:32:51 crc kubenswrapper[5002]: W1203 16:32:51.128700 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32293180_236d_484e_a9fa_d2d9cd940535.slice/crio-716abe9113dc7e31499e40dd131b0b4c65dd079eebd68bb5e8c91af7f48fd966 WatchSource:0}: Error finding container 716abe9113dc7e31499e40dd131b0b4c65dd079eebd68bb5e8c91af7f48fd966: Status 404 returned error can't find the container with id 716abe9113dc7e31499e40dd131b0b4c65dd079eebd68bb5e8c91af7f48fd966 Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.440438 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" event={"ID":"32293180-236d-484e-a9fa-d2d9cd940535","Type":"ContainerStarted","Data":"6d4dec867b50fdf82d14b3e6c59689274a992052e762dac2bb7a675d4b7e9303"} Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.440510 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" event={"ID":"32293180-236d-484e-a9fa-d2d9cd940535","Type":"ContainerStarted","Data":"716abe9113dc7e31499e40dd131b0b4c65dd079eebd68bb5e8c91af7f48fd966"} Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.455010 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h9n6k" podStartSLOduration=69.454977541 podStartE2EDuration="1m9.454977541s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:32:51.454357255 +0000 UTC m=+94.868179183" watchObservedRunningTime="2025-12-03 16:32:51.454977541 +0000 UTC m=+94.868799469" Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.840057 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:51 crc kubenswrapper[5002]: I1203 16:32:51.840176 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:51 crc kubenswrapper[5002]: E1203 16:32:51.840442 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:51 crc kubenswrapper[5002]: E1203 16:32:51.840545 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:52 crc kubenswrapper[5002]: I1203 16:32:52.839250 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:52 crc kubenswrapper[5002]: I1203 16:32:52.839250 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:52 crc kubenswrapper[5002]: E1203 16:32:52.839719 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:52 crc kubenswrapper[5002]: E1203 16:32:52.840032 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:53 crc kubenswrapper[5002]: I1203 16:32:53.839966 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:53 crc kubenswrapper[5002]: I1203 16:32:53.840125 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:53 crc kubenswrapper[5002]: E1203 16:32:53.840219 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:53 crc kubenswrapper[5002]: E1203 16:32:53.840335 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:54 crc kubenswrapper[5002]: I1203 16:32:54.840142 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:54 crc kubenswrapper[5002]: I1203 16:32:54.840253 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:54 crc kubenswrapper[5002]: E1203 16:32:54.840306 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:54 crc kubenswrapper[5002]: E1203 16:32:54.840468 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:55 crc kubenswrapper[5002]: I1203 16:32:55.839838 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:55 crc kubenswrapper[5002]: I1203 16:32:55.839941 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:55 crc kubenswrapper[5002]: E1203 16:32:55.839990 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:55 crc kubenswrapper[5002]: E1203 16:32:55.840185 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:56 crc kubenswrapper[5002]: I1203 16:32:56.839817 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:56 crc kubenswrapper[5002]: I1203 16:32:56.839852 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:56 crc kubenswrapper[5002]: E1203 16:32:56.840854 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:56 crc kubenswrapper[5002]: E1203 16:32:56.841082 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:57 crc kubenswrapper[5002]: I1203 16:32:57.840571 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:57 crc kubenswrapper[5002]: I1203 16:32:57.840693 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:57 crc kubenswrapper[5002]: E1203 16:32:57.841526 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:57 crc kubenswrapper[5002]: I1203 16:32:57.841708 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:32:57 crc kubenswrapper[5002]: E1203 16:32:57.841968 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:32:57 crc kubenswrapper[5002]: E1203 16:32:57.841975 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:32:57 crc kubenswrapper[5002]: I1203 16:32:57.862632 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 16:32:58 crc kubenswrapper[5002]: I1203 16:32:58.839995 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:32:58 crc kubenswrapper[5002]: I1203 16:32:58.840074 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:32:58 crc kubenswrapper[5002]: E1203 16:32:58.840119 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:32:58 crc kubenswrapper[5002]: E1203 16:32:58.840263 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:32:59 crc kubenswrapper[5002]: I1203 16:32:59.840051 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:32:59 crc kubenswrapper[5002]: I1203 16:32:59.840084 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:32:59 crc kubenswrapper[5002]: E1203 16:32:59.840232 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:32:59 crc kubenswrapper[5002]: E1203 16:32:59.840394 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:00 crc kubenswrapper[5002]: I1203 16:33:00.839796 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:00 crc kubenswrapper[5002]: I1203 16:33:00.839807 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:00 crc kubenswrapper[5002]: E1203 16:33:00.840053 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:00 crc kubenswrapper[5002]: E1203 16:33:00.840161 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:01 crc kubenswrapper[5002]: I1203 16:33:01.319908 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:01 crc kubenswrapper[5002]: E1203 16:33:01.320077 5002 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:33:01 crc kubenswrapper[5002]: E1203 16:33:01.320151 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs podName:24141739-e7a8-40cf-ab9e-267ee876230b nodeName:}" failed. No retries permitted until 2025-12-03 16:34:05.320132144 +0000 UTC m=+168.733954032 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs") pod "network-metrics-daemon-c7qvw" (UID: "24141739-e7a8-40cf-ab9e-267ee876230b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:33:01 crc kubenswrapper[5002]: I1203 16:33:01.839717 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:01 crc kubenswrapper[5002]: I1203 16:33:01.839836 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:01 crc kubenswrapper[5002]: E1203 16:33:01.839939 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:01 crc kubenswrapper[5002]: E1203 16:33:01.840027 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:02 crc kubenswrapper[5002]: I1203 16:33:02.840032 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:02 crc kubenswrapper[5002]: I1203 16:33:02.840154 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:02 crc kubenswrapper[5002]: E1203 16:33:02.840242 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:02 crc kubenswrapper[5002]: E1203 16:33:02.840323 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:03 crc kubenswrapper[5002]: I1203 16:33:03.839638 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:03 crc kubenswrapper[5002]: I1203 16:33:03.839882 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:03 crc kubenswrapper[5002]: E1203 16:33:03.839983 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:03 crc kubenswrapper[5002]: E1203 16:33:03.840121 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:04 crc kubenswrapper[5002]: I1203 16:33:04.840119 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:04 crc kubenswrapper[5002]: I1203 16:33:04.840200 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:04 crc kubenswrapper[5002]: E1203 16:33:04.840342 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:04 crc kubenswrapper[5002]: E1203 16:33:04.840528 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:05 crc kubenswrapper[5002]: I1203 16:33:05.839969 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:05 crc kubenswrapper[5002]: I1203 16:33:05.840063 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:05 crc kubenswrapper[5002]: E1203 16:33:05.840501 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:05 crc kubenswrapper[5002]: E1203 16:33:05.840825 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:06 crc kubenswrapper[5002]: I1203 16:33:06.840411 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:06 crc kubenswrapper[5002]: I1203 16:33:06.840463 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:06 crc kubenswrapper[5002]: E1203 16:33:06.842105 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:06 crc kubenswrapper[5002]: E1203 16:33:06.842276 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:06 crc kubenswrapper[5002]: I1203 16:33:06.873780 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=9.873757413 podStartE2EDuration="9.873757413s" podCreationTimestamp="2025-12-03 16:32:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:06.873146168 +0000 UTC m=+110.286968106" watchObservedRunningTime="2025-12-03 16:33:06.873757413 +0000 UTC m=+110.287579301" Dec 03 16:33:07 crc kubenswrapper[5002]: I1203 16:33:07.839830 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:07 crc kubenswrapper[5002]: I1203 16:33:07.839894 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:07 crc kubenswrapper[5002]: E1203 16:33:07.840489 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:07 crc kubenswrapper[5002]: E1203 16:33:07.840857 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:08 crc kubenswrapper[5002]: I1203 16:33:08.839554 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:08 crc kubenswrapper[5002]: E1203 16:33:08.839875 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:08 crc kubenswrapper[5002]: I1203 16:33:08.839627 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:08 crc kubenswrapper[5002]: E1203 16:33:08.840182 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:09 crc kubenswrapper[5002]: I1203 16:33:09.839350 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:09 crc kubenswrapper[5002]: I1203 16:33:09.839395 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:09 crc kubenswrapper[5002]: E1203 16:33:09.839663 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:09 crc kubenswrapper[5002]: E1203 16:33:09.839919 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:09 crc kubenswrapper[5002]: I1203 16:33:09.840664 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:33:09 crc kubenswrapper[5002]: E1203 16:33:09.840823 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4szh5_openshift-ovn-kubernetes(fc13f2ec-2d1e-4432-9f8d-82079a9dfe01)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" Dec 03 16:33:10 crc kubenswrapper[5002]: I1203 16:33:10.839983 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:10 crc kubenswrapper[5002]: I1203 16:33:10.840114 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:10 crc kubenswrapper[5002]: E1203 16:33:10.840290 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:10 crc kubenswrapper[5002]: E1203 16:33:10.840474 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:11 crc kubenswrapper[5002]: I1203 16:33:11.839673 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:11 crc kubenswrapper[5002]: I1203 16:33:11.839983 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:11 crc kubenswrapper[5002]: E1203 16:33:11.840244 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:11 crc kubenswrapper[5002]: E1203 16:33:11.840377 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:12 crc kubenswrapper[5002]: I1203 16:33:12.839560 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:12 crc kubenswrapper[5002]: I1203 16:33:12.839611 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:12 crc kubenswrapper[5002]: E1203 16:33:12.839699 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:12 crc kubenswrapper[5002]: E1203 16:33:12.839920 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:13 crc kubenswrapper[5002]: I1203 16:33:13.839444 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:13 crc kubenswrapper[5002]: E1203 16:33:13.839578 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:13 crc kubenswrapper[5002]: I1203 16:33:13.839641 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:13 crc kubenswrapper[5002]: E1203 16:33:13.839815 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:14 crc kubenswrapper[5002]: I1203 16:33:14.841051 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:14 crc kubenswrapper[5002]: E1203 16:33:14.841255 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:14 crc kubenswrapper[5002]: I1203 16:33:14.841056 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:14 crc kubenswrapper[5002]: E1203 16:33:14.841590 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:15 crc kubenswrapper[5002]: I1203 16:33:15.839401 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:15 crc kubenswrapper[5002]: I1203 16:33:15.839703 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:15 crc kubenswrapper[5002]: E1203 16:33:15.839716 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:15 crc kubenswrapper[5002]: E1203 16:33:15.839852 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:16 crc kubenswrapper[5002]: I1203 16:33:16.839618 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:16 crc kubenswrapper[5002]: I1203 16:33:16.839618 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:16 crc kubenswrapper[5002]: E1203 16:33:16.841551 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:16 crc kubenswrapper[5002]: E1203 16:33:16.842249 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:16 crc kubenswrapper[5002]: E1203 16:33:16.872934 5002 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 16:33:16 crc kubenswrapper[5002]: E1203 16:33:16.926951 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:33:17 crc kubenswrapper[5002]: I1203 16:33:17.839593 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:17 crc kubenswrapper[5002]: I1203 16:33:17.839617 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:17 crc kubenswrapper[5002]: E1203 16:33:17.839730 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:17 crc kubenswrapper[5002]: E1203 16:33:17.839837 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.541410 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/1.log" Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.542604 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/0.log" Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.542654 5002 generic.go:334] "Generic (PLEG): container finished" podID="2de485fd-67c0-4be7-abb1-92509ea373da" containerID="46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233" exitCode=1 Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.542687 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerDied","Data":"46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233"} Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.542724 5002 scope.go:117] "RemoveContainer" containerID="cd4d7c04a1d963996c64b398f1af63b1951cf069125400e4a169073e24f4f2e4" Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.543742 5002 scope.go:117] "RemoveContainer" containerID="46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233" Dec 03 16:33:18 crc kubenswrapper[5002]: E1203 16:33:18.544419 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gjxps_openshift-multus(2de485fd-67c0-4be7-abb1-92509ea373da)\"" pod="openshift-multus/multus-gjxps" podUID="2de485fd-67c0-4be7-abb1-92509ea373da" Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.839683 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:18 crc kubenswrapper[5002]: I1203 16:33:18.839870 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:18 crc kubenswrapper[5002]: E1203 16:33:18.839995 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:18 crc kubenswrapper[5002]: E1203 16:33:18.840121 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:19 crc kubenswrapper[5002]: I1203 16:33:19.547522 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/1.log" Dec 03 16:33:19 crc kubenswrapper[5002]: I1203 16:33:19.839944 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:19 crc kubenswrapper[5002]: I1203 16:33:19.840031 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:19 crc kubenswrapper[5002]: E1203 16:33:19.840807 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:19 crc kubenswrapper[5002]: E1203 16:33:19.841062 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:20 crc kubenswrapper[5002]: I1203 16:33:20.839474 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:20 crc kubenswrapper[5002]: I1203 16:33:20.839559 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:20 crc kubenswrapper[5002]: E1203 16:33:20.839680 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:20 crc kubenswrapper[5002]: E1203 16:33:20.839811 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:21 crc kubenswrapper[5002]: I1203 16:33:21.840205 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:21 crc kubenswrapper[5002]: I1203 16:33:21.840363 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:21 crc kubenswrapper[5002]: E1203 16:33:21.840373 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:21 crc kubenswrapper[5002]: E1203 16:33:21.840543 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:21 crc kubenswrapper[5002]: I1203 16:33:21.841867 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:33:21 crc kubenswrapper[5002]: E1203 16:33:21.928362 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.562209 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/3.log" Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.565860 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerStarted","Data":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"} Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.566411 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.603356 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podStartSLOduration=100.603287453 podStartE2EDuration="1m40.603287453s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:22.599434063 +0000 UTC m=+126.013255981" watchObservedRunningTime="2025-12-03 16:33:22.603287453 +0000 UTC m=+126.017109371" Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.729441 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-c7qvw"] Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.729552 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:22 crc kubenswrapper[5002]: E1203 16:33:22.729641 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:22 crc kubenswrapper[5002]: I1203 16:33:22.854085 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:22 crc kubenswrapper[5002]: E1203 16:33:22.854446 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:23 crc kubenswrapper[5002]: I1203 16:33:23.839995 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:23 crc kubenswrapper[5002]: I1203 16:33:23.840068 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:23 crc kubenswrapper[5002]: E1203 16:33:23.840161 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:23 crc kubenswrapper[5002]: E1203 16:33:23.840281 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:24 crc kubenswrapper[5002]: I1203 16:33:24.839586 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:24 crc kubenswrapper[5002]: E1203 16:33:24.839768 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:24 crc kubenswrapper[5002]: I1203 16:33:24.839817 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:24 crc kubenswrapper[5002]: E1203 16:33:24.839946 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:25 crc kubenswrapper[5002]: I1203 16:33:25.840014 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:25 crc kubenswrapper[5002]: E1203 16:33:25.840236 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:25 crc kubenswrapper[5002]: I1203 16:33:25.840464 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:25 crc kubenswrapper[5002]: E1203 16:33:25.840540 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:26 crc kubenswrapper[5002]: I1203 16:33:26.840276 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:26 crc kubenswrapper[5002]: E1203 16:33:26.842356 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:26 crc kubenswrapper[5002]: I1203 16:33:26.842476 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:26 crc kubenswrapper[5002]: E1203 16:33:26.842633 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:26 crc kubenswrapper[5002]: E1203 16:33:26.929053 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:33:27 crc kubenswrapper[5002]: I1203 16:33:27.839850 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:27 crc kubenswrapper[5002]: E1203 16:33:27.840187 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:27 crc kubenswrapper[5002]: I1203 16:33:27.839879 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:27 crc kubenswrapper[5002]: E1203 16:33:27.840401 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:28 crc kubenswrapper[5002]: I1203 16:33:28.840302 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:28 crc kubenswrapper[5002]: I1203 16:33:28.840483 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:28 crc kubenswrapper[5002]: E1203 16:33:28.840618 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:28 crc kubenswrapper[5002]: E1203 16:33:28.840800 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:29 crc kubenswrapper[5002]: I1203 16:33:29.840259 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:29 crc kubenswrapper[5002]: I1203 16:33:29.840314 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:29 crc kubenswrapper[5002]: E1203 16:33:29.840448 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:29 crc kubenswrapper[5002]: E1203 16:33:29.840795 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:29 crc kubenswrapper[5002]: I1203 16:33:29.840840 5002 scope.go:117] "RemoveContainer" containerID="46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233" Dec 03 16:33:30 crc kubenswrapper[5002]: I1203 16:33:30.604928 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/1.log" Dec 03 16:33:30 crc kubenswrapper[5002]: I1203 16:33:30.605284 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerStarted","Data":"cd3179f37f5c9234d0dd5300e3b24e2e121394d7137b95d57682b75d5d266c1b"} Dec 03 16:33:30 crc kubenswrapper[5002]: I1203 16:33:30.842492 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:30 crc kubenswrapper[5002]: E1203 16:33:30.843624 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:30 crc kubenswrapper[5002]: I1203 16:33:30.843780 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:30 crc kubenswrapper[5002]: E1203 16:33:30.843915 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:31 crc kubenswrapper[5002]: I1203 16:33:31.839790 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:31 crc kubenswrapper[5002]: E1203 16:33:31.840006 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:31 crc kubenswrapper[5002]: I1203 16:33:31.839742 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:31 crc kubenswrapper[5002]: E1203 16:33:31.840286 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:31 crc kubenswrapper[5002]: E1203 16:33:31.930507 5002 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:33:32 crc kubenswrapper[5002]: I1203 16:33:32.839893 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:32 crc kubenswrapper[5002]: I1203 16:33:32.839995 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:32 crc kubenswrapper[5002]: E1203 16:33:32.840076 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:32 crc kubenswrapper[5002]: E1203 16:33:32.840222 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:33 crc kubenswrapper[5002]: I1203 16:33:33.840206 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:33 crc kubenswrapper[5002]: I1203 16:33:33.840239 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:33 crc kubenswrapper[5002]: E1203 16:33:33.840339 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:33 crc kubenswrapper[5002]: E1203 16:33:33.840488 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:34 crc kubenswrapper[5002]: I1203 16:33:34.840179 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:34 crc kubenswrapper[5002]: I1203 16:33:34.840188 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:34 crc kubenswrapper[5002]: E1203 16:33:34.840316 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:34 crc kubenswrapper[5002]: E1203 16:33:34.840404 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:35 crc kubenswrapper[5002]: I1203 16:33:35.840091 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:35 crc kubenswrapper[5002]: I1203 16:33:35.840124 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:35 crc kubenswrapper[5002]: E1203 16:33:35.840287 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:33:35 crc kubenswrapper[5002]: E1203 16:33:35.840429 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:33:36 crc kubenswrapper[5002]: I1203 16:33:36.839904 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:36 crc kubenswrapper[5002]: I1203 16:33:36.840012 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:36 crc kubenswrapper[5002]: E1203 16:33:36.840891 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:33:36 crc kubenswrapper[5002]: E1203 16:33:36.841147 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c7qvw" podUID="24141739-e7a8-40cf-ab9e-267ee876230b" Dec 03 16:33:37 crc kubenswrapper[5002]: I1203 16:33:37.840311 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:37 crc kubenswrapper[5002]: I1203 16:33:37.840445 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:37 crc kubenswrapper[5002]: I1203 16:33:37.843317 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 16:33:37 crc kubenswrapper[5002]: I1203 16:33:37.844194 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 16:33:38 crc kubenswrapper[5002]: I1203 16:33:38.840177 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:38 crc kubenswrapper[5002]: I1203 16:33:38.840408 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:33:38 crc kubenswrapper[5002]: I1203 16:33:38.842515 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 16:33:38 crc kubenswrapper[5002]: I1203 16:33:38.842694 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 16:33:38 crc kubenswrapper[5002]: I1203 16:33:38.842971 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 16:33:38 crc kubenswrapper[5002]: I1203 16:33:38.845691 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.401328 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.802386 5002 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.845395 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rr89p"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.845805 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.847843 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.848203 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.848468 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.848768 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.852084 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sprqt"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.852676 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.853862 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.854072 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-68mq2"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.854342 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:41 crc kubenswrapper[5002]: W1203 16:33:41.854765 5002 reflector.go:561] object-"openshift-console-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 03 16:33:41 crc kubenswrapper[5002]: E1203 16:33:41.854797 5002 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.855112 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.855507 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.855731 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.861261 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.861684 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.861722 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.861970 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.862345 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.862422 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.862526 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.862668 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.862874 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.862969 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.863018 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.863275 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.863484 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.863601 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.863627 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.863916 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.864006 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.864231 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-bpkhn"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.864784 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.865144 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.865429 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.866212 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cfssq"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.866831 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.869673 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6zz9b"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.870360 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.870677 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.871071 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.871461 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ln6gt"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.872120 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.872605 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.882658 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8xg65"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.885043 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.888563 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.907153 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.908846 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.909789 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.909945 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.910130 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.910254 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.910518 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.910623 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.910855 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.910869 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.911050 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.911074 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.911216 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.911507 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.911807 
5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.912227 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.912314 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.912534 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.913581 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.913697 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.913864 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6qpdx"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.914330 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.915044 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.915266 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917391 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917429 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917451 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917528 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917550 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917576 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917633 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917395 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917654 5002 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917726 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917720 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917873 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.917968 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.918003 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.918063 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.918102 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.926011 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.926010 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.926716 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.927174 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.927822 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.927931 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.928181 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.928324 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.928565 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.928599 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.929206 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.930714 5002 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.931339 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-fm2l2"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.931928 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.931941 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.937015 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.937230 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.937520 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.937835 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.939717 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.939816 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.940014 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.940226 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.940303 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.940484 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.940650 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941027 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941145 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941179 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941215 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 16:33:41 
crc kubenswrapper[5002]: I1203 16:33:41.941238 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941187 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941440 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941458 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.941678 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.943389 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.943884 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.944203 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.963612 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.964229 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.964280 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.964421 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994140 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-wtrm8"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994430 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/756420f9-086d-4b08-8094-377c93482ca7-auth-proxy-config\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994468 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/756420f9-086d-4b08-8094-377c93482ca7-kube-api-access-pvfxz\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994512 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/074f396d-24a0-47a4-836a-636ed06d95e6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994533 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994551 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-oauth-serving-cert\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994568 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62df1cbe-97fd-495e-8249-0a697e526ec9-trusted-ca\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994587 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994609 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6424af90-f4da-452e-a0fe-0cc758f2dc7f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994632 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994803 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp6zj\" (UniqueName: \"kubernetes.io/projected/62df1cbe-97fd-495e-8249-0a697e526ec9-kube-api-access-wp6zj\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.994905 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.995509 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.995673 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.995806 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996445 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.995109 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-dir\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996670 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-audit\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996685 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rpfg\" (UniqueName: \"kubernetes.io/projected/6424af90-f4da-452e-a0fe-0cc758f2dc7f-kube-api-access-9rpfg\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996702 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62df1cbe-97fd-495e-8249-0a697e526ec9-config\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996719 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62df1cbe-97fd-495e-8249-0a697e526ec9-serving-cert\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996819 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996837 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6v2t\" (UniqueName: \"kubernetes.io/projected/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-kube-api-access-r6v2t\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996856 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-trusted-ca-bundle\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996881 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv2sx\" (UniqueName: \"kubernetes.io/projected/5fa63999-6c0a-4b33-8585-ef7c04ceab79-kube-api-access-hv2sx\") pod \"downloads-7954f5f757-bpkhn\" (UID: \"5fa63999-6c0a-4b33-8585-ef7c04ceab79\") " pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.996906 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.997088 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk"] Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.997167 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-service-ca\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.997501 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.997719 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.998546 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999193 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6424af90-f4da-452e-a0fe-0cc758f2dc7f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999222 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999241 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-serving-cert\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-oauth-config\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999277 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65d93135-ae61-4e1a-be22-29f7dfee2808-serving-cert\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999292 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f3e66a3-d515-4057-86c3-c0d956189e98-serving-cert\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999309 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/074f396d-24a0-47a4-836a-636ed06d95e6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: 
\"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-service-ca-bundle\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999341 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-serving-cert\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999379 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6cc7\" (UniqueName: \"kubernetes.io/projected/fc315c4e-b735-4cd3-92d2-34b505810a5d-kube-api-access-s6cc7\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999395 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kts9\" (UniqueName: \"kubernetes.io/projected/65d93135-ae61-4e1a-be22-29f7dfee2808-kube-api-access-2kts9\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999411 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-etcd-client\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999449 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-etcd-serving-ca\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999481 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: 
\"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999502 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntddk\" (UniqueName: \"kubernetes.io/projected/3f3e66a3-d515-4057-86c3-c0d956189e98-kube-api-access-ntddk\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999522 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fc315c4e-b735-4cd3-92d2-34b505810a5d-node-pullsecrets\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999538 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fc315c4e-b735-4cd3-92d2-34b505810a5d-audit-dir\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999589 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999607 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999623 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-config\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999644 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr2jp\" (UniqueName: \"kubernetes.io/projected/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-kube-api-access-sr2jp\") pod \"console-f9d7485db-68mq2\" (UID: 
\"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999676 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-image-import-ca\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999690 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999709 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-config\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/756420f9-086d-4b08-8094-377c93482ca7-config\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:41 crc kubenswrapper[5002]: I1203 16:33:41.999762 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-serving-cert\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.000410 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dffhq\" (UniqueName: \"kubernetes.io/projected/074f396d-24a0-47a4-836a-636ed06d95e6-kube-api-access-dffhq\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.001347 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-encryption-config\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.001375 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3f3e66a3-d515-4057-86c3-c0d956189e98-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.001625 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.002897 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/756420f9-086d-4b08-8094-377c93482ca7-machine-approver-tls\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.002933 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jldnw\" (UniqueName: \"kubernetes.io/projected/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-kube-api-access-jldnw\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.002953 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-client-ca\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.002972 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-config\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.002990 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-config\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.003007 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-policies\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.003506 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.003813 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.004014 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.004036 5002 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.006866 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-68g57"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.006968 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.007642 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.007866 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.008222 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.008371 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.008835 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.009448 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.012930 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.013404 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7jwsj"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.013784 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.014206 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.014909 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.014978 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.017112 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.018034 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.018227 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.018628 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.018905 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.019280 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.019870 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qqqkv"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.020459 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.020537 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.022687 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rr89p"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.023653 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.024564 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.024624 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.025399 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.026660 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.027072 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.028129 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.033851 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.034631 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.035730 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.036822 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.037003 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.037273 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.037873 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-jnmmj"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.038264 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.039186 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sprqt"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.041088 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bpkhn"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.042475 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.044717 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-68mq2"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.046041 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-dvz54"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.051809 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.055853 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.065409 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.068451 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.069334 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.069359 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-fm2l2"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.069372 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.070626 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8xg65"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.071867 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.073926 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-68g57"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.075341 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.077535 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.078340 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.080489 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.082324 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.082391 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dvz54"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.082763 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.088128 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.089642 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6qpdx"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 
16:33:42.091519 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.093084 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.093390 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6zz9b"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.095137 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cfssq"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.097865 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.098704 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-qcm4d"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.099510 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.100535 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104014 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62df1cbe-97fd-495e-8249-0a697e526ec9-serving-cert\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104096 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17833d15-ffa9-496b-8ee8-6f97fd7f976e-serving-cert\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-audit-dir\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104192 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104214 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bm9m\" (UniqueName: \"kubernetes.io/projected/3ffcb996-f277-4b13-942a-ff911dcf1899-kube-api-access-8bm9m\") pod \"control-plane-machine-set-operator-78cbb6b69f-d5jnd\" (UID: 
\"3ffcb996-f277-4b13-942a-ff911dcf1899\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104237 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv2sx\" (UniqueName: \"kubernetes.io/projected/5fa63999-6c0a-4b33-8585-ef7c04ceab79-kube-api-access-hv2sx\") pod \"downloads-7954f5f757-bpkhn\" (UID: \"5fa63999-6c0a-4b33-8585-ef7c04ceab79\") " pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104256 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-config\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-service-ca\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104351 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g5fr\" (UniqueName: \"kubernetes.io/projected/5cf52b45-df6d-44b7-92bb-703ae07fb259-kube-api-access-2g5fr\") pod \"migrator-59844c95c7-2vv8k\" (UID: \"5cf52b45-df6d-44b7-92bb-703ae07fb259\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104377 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104397 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65d93135-ae61-4e1a-be22-29f7dfee2808-serving-cert\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104416 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104436 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-serving-cert\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104455 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-srv-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104508 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47611376-613f-44da-9154-7c8f3dfa936c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104542 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47611376-613f-44da-9154-7c8f3dfa936c-config\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104565 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0feb7832-783f-44ca-90d0-dd9685bf1031-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104584 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbff1165-ffbe-4eea-8541-ad40311df417-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104727 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/074f396d-24a0-47a4-836a-636ed06d95e6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104771 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6jrf\" (UniqueName: \"kubernetes.io/projected/43c67705-ae68-4ff3-a7d0-5666619f90bb-kube-api-access-r6jrf\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104792 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9xf2\" (UniqueName: \"kubernetes.io/projected/306f45fb-8587-468f-8032-ea87c84f9953-kube-api-access-v9xf2\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.104867 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105029 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47611376-613f-44da-9154-7c8f3dfa936c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105054 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-serving-cert\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105104 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-etcd-client\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-etcd-serving-ca\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105232 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntddk\" (UniqueName: \"kubernetes.io/projected/3f3e66a3-d515-4057-86c3-c0d956189e98-kube-api-access-ntddk\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105279 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-default-certificate\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105312 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105372 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105398 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105446 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-config\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105471 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-ca\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105517 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-client\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105543 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105590 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqfln\" (UniqueName: \"kubernetes.io/projected/b5cbe8f4-807c-412d-91e5-bb1a5b6c677a-kube-api-access-mqfln\") pod \"dns-operator-744455d44c-fm2l2\" (UID: \"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a\") " pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105616 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-config\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105666 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.105698 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-etcd-client\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106068 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-audit-policies\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106100 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b1ff8cc-9661-4d44-8364-96c766b70087-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106127 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106155 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-serving-cert\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106183 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-service-ca-bundle\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106209 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859891e6-8a79-45cc-91af-d40414173836-config\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106234 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-encryption-config\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106274 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/306f45fb-8587-468f-8032-ea87c84f9953-srv-cert\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106304 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dffhq\" (UniqueName: \"kubernetes.io/projected/074f396d-24a0-47a4-836a-636ed06d95e6-kube-api-access-dffhq\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106374 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5cbe8f4-807c-412d-91e5-bb1a5b6c677a-metrics-tls\") pod \"dns-operator-744455d44c-fm2l2\" (UID: \"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a\") " pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43c67705-ae68-4ff3-a7d0-5666619f90bb-trusted-ca\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106534 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3f3e66a3-d515-4057-86c3-c0d956189e98-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106568 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jldnw\" (UniqueName: \"kubernetes.io/projected/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-kube-api-access-jldnw\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106606 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/64b9dc30-c0fb-461c-aa13-fe92db94c162-proxy-tls\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-config\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106660 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-client-ca\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106687 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dbff1165-ffbe-4eea-8541-ad40311df417-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106713 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/074f396d-24a0-47a4-836a-636ed06d95e6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106793 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/756420f9-086d-4b08-8094-377c93482ca7-kube-api-access-pvfxz\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106826 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106851 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-oauth-serving-cert\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106878 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106902 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fad564d-947c-411c-b1e7-0e5d82ebb310-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106932 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbff1165-ffbe-4eea-8541-ad40311df417-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.106976 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-audit\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107005 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp6zj\" (UniqueName: \"kubernetes.io/projected/62df1cbe-97fd-495e-8249-0a697e526ec9-kube-api-access-wp6zj\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107031 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rpfg\" (UniqueName: \"kubernetes.io/projected/6424af90-f4da-452e-a0fe-0cc758f2dc7f-kube-api-access-9rpfg\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107061 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-metrics-certs\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62df1cbe-97fd-495e-8249-0a697e526ec9-config\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107160 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd5lc\" (UniqueName: \"kubernetes.io/projected/179eb6c2-e4c4-4e61-baed-5c5628c342c8-kube-api-access-pd5lc\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107201 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6v2t\" (UniqueName: \"kubernetes.io/projected/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-kube-api-access-r6v2t\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107267 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-trusted-ca-bundle\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107308 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107419 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/64b9dc30-c0fb-461c-aa13-fe92db94c162-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107455 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/43c67705-ae68-4ff3-a7d0-5666619f90bb-metrics-tls\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107512 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/306f45fb-8587-468f-8032-ea87c84f9953-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107543 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd4br\" (UniqueName: \"kubernetes.io/projected/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-kube-api-access-qd4br\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107628 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6424af90-f4da-452e-a0fe-0cc758f2dc7f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107683 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p7h6\" (UniqueName: \"kubernetes.io/projected/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-kube-api-access-9p7h6\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.107775 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-oauth-config\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108015 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f3e66a3-d515-4057-86c3-c0d956189e98-serving-cert\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108126 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b1ff8cc-9661-4d44-8364-96c766b70087-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108155 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0feb7832-783f-44ca-90d0-dd9685bf1031-images\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108187 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8mnj\" (UniqueName: \"kubernetes.io/projected/d77e2cb1-507a-44f6-b273-d2140d626699-kube-api-access-q8mnj\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108223 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-service-ca-bundle\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108252 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/859891e6-8a79-45cc-91af-d40414173836-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108280 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-client-ca\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108308 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-serving-cert\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108338 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108366 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6cc7\" (UniqueName: \"kubernetes.io/projected/fc315c4e-b735-4cd3-92d2-34b505810a5d-kube-api-access-s6cc7\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kts9\" (UniqueName: \"kubernetes.io/projected/65d93135-ae61-4e1a-be22-29f7dfee2808-kube-api-access-2kts9\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108423 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8cf598-b803-4504-a472-49efee59fd59-config\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108475 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108510 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/859891e6-8a79-45cc-91af-d40414173836-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc 
kubenswrapper[5002]: I1203 16:33:42.108539 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fc315c4e-b735-4cd3-92d2-34b505810a5d-node-pullsecrets\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108566 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fc315c4e-b735-4cd3-92d2-34b505810a5d-audit-dir\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108617 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/392438fa-8a16-4db3-9eb2-a37287f5b558-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-j964n\" (UID: \"392438fa-8a16-4db3-9eb2-a37287f5b558\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbp5w\" (UniqueName: \"kubernetes.io/projected/64b9dc30-c0fb-461c-aa13-fe92db94c162-kube-api-access-vbp5w\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108676 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx24n\" (UniqueName: \"kubernetes.io/projected/3fad564d-947c-411c-b1e7-0e5d82ebb310-kube-api-access-cx24n\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108707 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg7qm\" (UniqueName: \"kubernetes.io/projected/392438fa-8a16-4db3-9eb2-a37287f5b558-kube-api-access-hg7qm\") pod \"cluster-samples-operator-665b6dd947-j964n\" (UID: \"392438fa-8a16-4db3-9eb2-a37287f5b558\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108736 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f8mj\" (UniqueName: \"kubernetes.io/projected/0feb7832-783f-44ca-90d0-dd9685bf1031-kube-api-access-7f8mj\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr2jp\" (UniqueName: \"kubernetes.io/projected/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-kube-api-access-sr2jp\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc 
kubenswrapper[5002]: I1203 16:33:42.108845 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7whdl\" (UniqueName: \"kubernetes.io/projected/892ffee2-4865-49b9-aaed-18176803dabb-kube-api-access-7whdl\") pod \"multus-admission-controller-857f4d67dd-7jwsj\" (UID: \"892ffee2-4865-49b9-aaed-18176803dabb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108870 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6e8cf598-b803-4504-a472-49efee59fd59-images\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108900 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-image-import-ca\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108930 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108963 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-config\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.108992 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/756420f9-086d-4b08-8094-377c93482ca7-config\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109021 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6e8cf598-b803-4504-a472-49efee59fd59-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109048 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvfvb\" (UniqueName: \"kubernetes.io/projected/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-kube-api-access-xvfvb\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109077 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-f42gq\" (UniqueName: \"kubernetes.io/projected/2b1ff8cc-9661-4d44-8364-96c766b70087-kube-api-access-f42gq\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109109 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0feb7832-783f-44ca-90d0-dd9685bf1031-proxy-tls\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109139 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-encryption-config\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109228 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/756420f9-086d-4b08-8094-377c93482ca7-machine-approver-tls\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109353 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fccw\" (UniqueName: \"kubernetes.io/projected/6e8cf598-b803-4504-a472-49efee59fd59-kube-api-access-7fccw\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109390 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ffcb996-f277-4b13-942a-ff911dcf1899-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-d5jnd\" (UID: \"3ffcb996-f277-4b13-942a-ff911dcf1899\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109418 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-profile-collector-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109445 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-service-ca\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109476 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-config\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109504 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-policies\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109610 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/756420f9-086d-4b08-8094-377c93482ca7-auth-proxy-config\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109646 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/892ffee2-4865-49b9-aaed-18176803dabb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7jwsj\" (UID: \"892ffee2-4865-49b9-aaed-18176803dabb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109673 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/43c67705-ae68-4ff3-a7d0-5666619f90bb-bound-sa-token\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109698 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzrmz\" (UniqueName: \"kubernetes.io/projected/17833d15-ffa9-496b-8ee8-6f97fd7f976e-kube-api-access-gzrmz\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d77e2cb1-507a-44f6-b273-d2140d626699-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109778 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62df1cbe-97fd-495e-8249-0a697e526ec9-trusted-ca\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109806 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/6424af90-f4da-452e-a0fe-0cc758f2dc7f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109833 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109860 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-stats-auth\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109893 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-dir\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.109921 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.110284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.110335 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62df1cbe-97fd-495e-8249-0a697e526ec9-serving-cert\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.110382 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vffvr"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111171 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-serving-cert\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111612 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111648 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111666 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111775 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-service-ca\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111798 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.111985 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.112108 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-serving-cert\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.113805 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.113858 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fc315c4e-b735-4cd3-92d2-34b505810a5d-node-pullsecrets\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.113867 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6424af90-f4da-452e-a0fe-0cc758f2dc7f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.114057 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fc315c4e-b735-4cd3-92d2-34b505810a5d-audit-dir\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.114526 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/756420f9-086d-4b08-8094-377c93482ca7-config\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.114564 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-audit\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.114870 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-oauth-serving-cert\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.115293 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/756420f9-086d-4b08-8094-377c93482ca7-auth-proxy-config\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.115811 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-dir\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.115964 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-service-ca-bundle\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.116267 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.116277 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6424af90-f4da-452e-a0fe-0cc758f2dc7f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.116661 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: 
\"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.116676 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7jwsj"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.117193 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-config\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.117805 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-config\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.117852 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-image-import-ca\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.117946 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62df1cbe-97fd-495e-8249-0a697e526ec9-config\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.118070 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d93135-ae61-4e1a-be22-29f7dfee2808-config\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.118143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.118403 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.118913 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.119222 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-serving-cert\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.119258 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.119291 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62df1cbe-97fd-495e-8249-0a697e526ec9-trusted-ca\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.119554 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-policies\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.119570 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65d93135-ae61-4e1a-be22-29f7dfee2808-serving-cert\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.119767 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-oauth-config\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.120271 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3f3e66a3-d515-4057-86c3-c0d956189e98-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.120402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/074f396d-24a0-47a4-836a-636ed06d95e6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.120464 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-config\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.120602 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.120653 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f3e66a3-d515-4057-86c3-c0d956189e98-serving-cert\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.120954 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-encryption-config\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.121199 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.121255 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.121293 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-client-ca\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.121632 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/074f396d-24a0-47a4-836a-636ed06d95e6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.121665 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qqqkv"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.123065 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ln6gt"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.122585 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fc315c4e-b735-4cd3-92d2-34b505810a5d-etcd-client\") pod 
\"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.123075 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.122661 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-trusted-ca-bundle\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.122717 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/756420f9-086d-4b08-8094-377c93482ca7-machine-approver-tls\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.122520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fc315c4e-b735-4cd3-92d2-34b505810a5d-etcd-serving-ca\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.123140 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.123921 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.123960 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.125050 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.126261 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vffvr"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.127558 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.128810 5002 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.128955 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-jnmmj"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.130204 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.131048 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-778kb"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.132556 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-778kb"] Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.132650 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.148384 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.168255 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.188721 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.208883 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.210904 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/392438fa-8a16-4db3-9eb2-a37287f5b558-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-j964n\" (UID: \"392438fa-8a16-4db3-9eb2-a37287f5b558\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.210956 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx24n\" (UniqueName: \"kubernetes.io/projected/3fad564d-947c-411c-b1e7-0e5d82ebb310-kube-api-access-cx24n\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.210981 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg7qm\" (UniqueName: \"kubernetes.io/projected/392438fa-8a16-4db3-9eb2-a37287f5b558-kube-api-access-hg7qm\") pod \"cluster-samples-operator-665b6dd947-j964n\" (UID: \"392438fa-8a16-4db3-9eb2-a37287f5b558\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211008 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbp5w\" (UniqueName: \"kubernetes.io/projected/64b9dc30-c0fb-461c-aa13-fe92db94c162-kube-api-access-vbp5w\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211035 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f8mj\" (UniqueName: \"kubernetes.io/projected/0feb7832-783f-44ca-90d0-dd9685bf1031-kube-api-access-7f8mj\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211084 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7whdl\" (UniqueName: \"kubernetes.io/projected/892ffee2-4865-49b9-aaed-18176803dabb-kube-api-access-7whdl\") pod \"multus-admission-controller-857f4d67dd-7jwsj\" (UID: \"892ffee2-4865-49b9-aaed-18176803dabb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6e8cf598-b803-4504-a472-49efee59fd59-images\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211149 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6e8cf598-b803-4504-a472-49efee59fd59-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211176 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvfvb\" (UniqueName: \"kubernetes.io/projected/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-kube-api-access-xvfvb\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211203 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f42gq\" (UniqueName: \"kubernetes.io/projected/2b1ff8cc-9661-4d44-8364-96c766b70087-kube-api-access-f42gq\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211230 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0feb7832-783f-44ca-90d0-dd9685bf1031-proxy-tls\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211261 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fccw\" (UniqueName: \"kubernetes.io/projected/6e8cf598-b803-4504-a472-49efee59fd59-kube-api-access-7fccw\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211358 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ffcb996-f277-4b13-942a-ff911dcf1899-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-d5jnd\" (UID: \"3ffcb996-f277-4b13-942a-ff911dcf1899\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211517 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-profile-collector-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211640 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-service-ca\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211680 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/892ffee2-4865-49b9-aaed-18176803dabb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7jwsj\" (UID: \"892ffee2-4865-49b9-aaed-18176803dabb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211707 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/43c67705-ae68-4ff3-a7d0-5666619f90bb-bound-sa-token\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211732 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzrmz\" (UniqueName: \"kubernetes.io/projected/17833d15-ffa9-496b-8ee8-6f97fd7f976e-kube-api-access-gzrmz\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211772 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d77e2cb1-507a-44f6-b273-d2140d626699-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211800 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-stats-auth\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211829 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/17833d15-ffa9-496b-8ee8-6f97fd7f976e-serving-cert\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211853 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-audit-dir\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211880 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bm9m\" (UniqueName: \"kubernetes.io/projected/3ffcb996-f277-4b13-942a-ff911dcf1899-kube-api-access-8bm9m\") pod \"control-plane-machine-set-operator-78cbb6b69f-d5jnd\" (UID: \"3ffcb996-f277-4b13-942a-ff911dcf1899\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.211972 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-config\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212038 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g5fr\" (UniqueName: \"kubernetes.io/projected/5cf52b45-df6d-44b7-92bb-703ae07fb259-kube-api-access-2g5fr\") pod \"migrator-59844c95c7-2vv8k\" (UID: \"5cf52b45-df6d-44b7-92bb-703ae07fb259\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212069 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0feb7832-783f-44ca-90d0-dd9685bf1031-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212102 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbff1165-ffbe-4eea-8541-ad40311df417-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212113 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-audit-dir\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212128 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-srv-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212181 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47611376-613f-44da-9154-7c8f3dfa936c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212209 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47611376-613f-44da-9154-7c8f3dfa936c-config\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212220 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6e8cf598-b803-4504-a472-49efee59fd59-images\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212239 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6jrf\" (UniqueName: \"kubernetes.io/projected/43c67705-ae68-4ff3-a7d0-5666619f90bb-kube-api-access-r6jrf\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212283 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9xf2\" (UniqueName: \"kubernetes.io/projected/306f45fb-8587-468f-8032-ea87c84f9953-kube-api-access-v9xf2\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212306 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212326 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47611376-613f-44da-9154-7c8f3dfa936c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212346 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-serving-cert\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc 
kubenswrapper[5002]: I1203 16:33:42.212374 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-default-certificate\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212414 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-ca\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212429 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-client\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212447 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212484 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqfln\" (UniqueName: \"kubernetes.io/projected/b5cbe8f4-807c-412d-91e5-bb1a5b6c677a-kube-api-access-mqfln\") pod \"dns-operator-744455d44c-fm2l2\" (UID: \"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a\") " pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212502 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-config\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212518 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212537 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-etcd-client\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212558 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-audit-policies\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212576 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b1ff8cc-9661-4d44-8364-96c766b70087-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212591 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212613 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-service-ca-bundle\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212629 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859891e6-8a79-45cc-91af-d40414173836-config\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212645 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-encryption-config\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/306f45fb-8587-468f-8032-ea87c84f9953-srv-cert\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212694 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5cbe8f4-807c-412d-91e5-bb1a5b6c677a-metrics-tls\") pod \"dns-operator-744455d44c-fm2l2\" (UID: \"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a\") " pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43c67705-ae68-4ff3-a7d0-5666619f90bb-trusted-ca\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 
16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212740 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/64b9dc30-c0fb-461c-aa13-fe92db94c162-proxy-tls\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212782 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dbff1165-ffbe-4eea-8541-ad40311df417-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212811 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fad564d-947c-411c-b1e7-0e5d82ebb310-serving-cert\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212830 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbff1165-ffbe-4eea-8541-ad40311df417-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212861 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-metrics-certs\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212890 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd5lc\" (UniqueName: \"kubernetes.io/projected/179eb6c2-e4c4-4e61-baed-5c5628c342c8-kube-api-access-pd5lc\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212925 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/64b9dc30-c0fb-461c-aa13-fe92db94c162-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212941 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/43c67705-ae68-4ff3-a7d0-5666619f90bb-metrics-tls\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212958 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212981 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/306f45fb-8587-468f-8032-ea87c84f9953-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213000 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd4br\" (UniqueName: \"kubernetes.io/projected/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-kube-api-access-qd4br\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213019 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p7h6\" (UniqueName: \"kubernetes.io/projected/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-kube-api-access-9p7h6\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213040 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b1ff8cc-9661-4d44-8364-96c766b70087-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213057 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0feb7832-783f-44ca-90d0-dd9685bf1031-images\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213073 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8mnj\" (UniqueName: \"kubernetes.io/projected/d77e2cb1-507a-44f6-b273-d2140d626699-kube-api-access-q8mnj\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213092 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/859891e6-8a79-45cc-91af-d40414173836-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213109 5002 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-client-ca\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213139 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8cf598-b803-4504-a472-49efee59fd59-config\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213155 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/859891e6-8a79-45cc-91af-d40414173836-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213776 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.213942 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-config\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.212956 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0feb7832-783f-44ca-90d0-dd9685bf1031-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.214538 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-service-ca-bundle\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.214817 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/392438fa-8a16-4db3-9eb2-a37287f5b558-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-j964n\" (UID: \"392438fa-8a16-4db3-9eb2-a37287f5b558\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.215039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6e8cf598-b803-4504-a472-49efee59fd59-machine-api-operator-tls\") pod 
\"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.215115 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859891e6-8a79-45cc-91af-d40414173836-config\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.215189 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-client-ca\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.215694 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8cf598-b803-4504-a472-49efee59fd59-config\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.215982 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/64b9dc30-c0fb-461c-aa13-fe92db94c162-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.216016 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43c67705-ae68-4ff3-a7d0-5666619f90bb-trusted-ca\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.217443 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/43c67705-ae68-4ff3-a7d0-5666619f90bb-metrics-tls\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.217450 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/859891e6-8a79-45cc-91af-d40414173836-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.218317 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fad564d-947c-411c-b1e7-0e5d82ebb310-serving-cert\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.218412 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-metrics-certs\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.218474 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5cbe8f4-807c-412d-91e5-bb1a5b6c677a-metrics-tls\") pod \"dns-operator-744455d44c-fm2l2\" (UID: \"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a\") " pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.228687 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.230285 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-stats-auth\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.236732 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-default-certificate\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.248315 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.253962 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47611376-613f-44da-9154-7c8f3dfa936c-config\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.270095 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.288157 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.296157 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47611376-613f-44da-9154-7c8f3dfa936c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.308876 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.327848 5002 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.348662 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.367355 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.378153 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-etcd-client\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.388397 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.396670 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-serving-cert\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.408958 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.419043 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-encryption-config\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.428513 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.433389 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-audit-policies\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.447649 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.455033 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.468825 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.476756 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-trusted-ca-bundle\") pod 
\"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.487593 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.509386 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.528862 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.547527 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.569513 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.588490 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.608710 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.615934 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17833d15-ffa9-496b-8ee8-6f97fd7f976e-serving-cert\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.630186 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.636933 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-client\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.648323 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.654169 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-config\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.668172 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.674793 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-service-ca\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.688077 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.694567 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/17833d15-ffa9-496b-8ee8-6f97fd7f976e-etcd-ca\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.708415 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.728654 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.735892 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ffcb996-f277-4b13-942a-ff911dcf1899-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-d5jnd\" (UID: \"3ffcb996-f277-4b13-942a-ff911dcf1899\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.748975 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.788839 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.808887 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.817062 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b1ff8cc-9661-4d44-8364-96c766b70087-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.828854 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.847853 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.856607 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b1ff8cc-9661-4d44-8364-96c766b70087-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.867911 5002 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.888570 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.908870 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.916815 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0feb7832-783f-44ca-90d0-dd9685bf1031-images\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.928976 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.936334 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0feb7832-783f-44ca-90d0-dd9685bf1031-proxy-tls\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.949141 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.968870 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.978536 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/892ffee2-4865-49b9-aaed-18176803dabb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7jwsj\" (UID: \"892ffee2-4865-49b9-aaed-18176803dabb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.988561 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 16:33:42 crc kubenswrapper[5002]: I1203 16:33:42.999683 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/64b9dc30-c0fb-461c-aa13-fe92db94c162-proxy-tls\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.009733 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.026853 5002 request.go:700] Waited for 1.008005588s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-scheduler-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.029238 5002 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.048270 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.068696 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.073505 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbff1165-ffbe-4eea-8541-ad40311df417-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.089661 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.100453 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbff1165-ffbe-4eea-8541-ad40311df417-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.109694 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.115880 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-profile-collector-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.120090 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/306f45fb-8587-468f-8032-ea87c84f9953-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.128898 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.148424 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.158737 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/306f45fb-8587-468f-8032-ea87c84f9953-srv-cert\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.169606 5002 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.188913 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.208779 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.212384 5002 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.212417 5002 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.212498 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d77e2cb1-507a-44f6-b273-d2140d626699-package-server-manager-serving-cert podName:d77e2cb1-507a-44f6-b273-d2140d626699 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.712471501 +0000 UTC m=+147.126293399 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/d77e2cb1-507a-44f6-b273-d2140d626699-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-dqgqr" (UID: "d77e2cb1-507a-44f6-b273-d2140d626699") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.212534 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-srv-cert podName:179eb6c2-e4c4-4e61-baed-5c5628c342c8 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.712519982 +0000 UTC m=+147.126341990 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-srv-cert") pod "catalog-operator-68c6474976-stdlq" (UID: "179eb6c2-e4c4-4e61-baed-5c5628c342c8") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.214881 5002 secret.go:188] Couldn't get secret openshift-image-registry/image-registry-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.214937 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-image-registry-operator-tls podName:0b16e68a-d8fd-46cb-918c-96b83f9df4b2 nodeName:}" failed. No retries permitted until 2025-12-03 16:33:43.714917486 +0000 UTC m=+147.128739374 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "image-registry-operator-tls" (UniqueName: "kubernetes.io/secret/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-image-registry-operator-tls") pod "cluster-image-registry-operator-dc59b4c8b-xq8x8" (UID: "0b16e68a-d8fd-46cb-918c-96b83f9df4b2") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.232719 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.247959 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.269827 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.288334 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.310425 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.329164 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.348444 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.369367 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.409137 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.428781 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.448445 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.468601 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.488725 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.508674 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.528456 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.549097 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.569200 5002 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.589454 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.609465 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.627942 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.648151 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.668603 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.690082 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.708166 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.728772 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.739522 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:43 crc kubenswrapper[5002]: E1203 16:33:43.739766 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:35:45.739723221 +0000 UTC m=+269.153545109 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.739992 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.740094 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d77e2cb1-507a-44f6-b273-d2140d626699-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.740136 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-srv-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.740201 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.740231 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.740958 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.743369 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/179eb6c2-e4c4-4e61-baed-5c5628c342c8-srv-cert\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:43 crc kubenswrapper[5002]: 
I1203 16:33:43.743646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.744030 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.748070 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d77e2cb1-507a-44f6-b273-d2140d626699-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.749410 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.768699 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.802886 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/756420f9-086d-4b08-8094-377c93482ca7-kube-api-access-pvfxz\") pod \"machine-approver-56656f9798-pzhk5\" (UID: \"756420f9-086d-4b08-8094-377c93482ca7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.808917 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.833306 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv2sx\" (UniqueName: \"kubernetes.io/projected/5fa63999-6c0a-4b33-8585-ef7c04ceab79-kube-api-access-hv2sx\") pod \"downloads-7954f5f757-bpkhn\" (UID: \"5fa63999-6c0a-4b33-8585-ef7c04ceab79\") " pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.841721 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.841891 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.845435 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.846082 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.847405 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.848801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntddk\" (UniqueName: \"kubernetes.io/projected/3f3e66a3-d515-4057-86c3-c0d956189e98-kube-api-access-ntddk\") pod \"openshift-config-operator-7777fb866f-hrqfx\" (UID: \"3f3e66a3-d515-4057-86c3-c0d956189e98\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.866633 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.868408 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.883558 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.889464 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.909253 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.942322 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr2jp\" (UniqueName: \"kubernetes.io/projected/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-kube-api-access-sr2jp\") pod \"console-f9d7485db-68mq2\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.954633 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:33:43 crc kubenswrapper[5002]: I1203 16:33:43.985718 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dffhq\" (UniqueName: \"kubernetes.io/projected/074f396d-24a0-47a4-836a-636ed06d95e6-kube-api-access-dffhq\") pod \"openshift-controller-manager-operator-756b6f6bc6-s57v6\" (UID: \"074f396d-24a0-47a4-836a-636ed06d95e6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.003719 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rpfg\" (UniqueName: \"kubernetes.io/projected/6424af90-f4da-452e-a0fe-0cc758f2dc7f-kube-api-access-9rpfg\") pod \"openshift-apiserver-operator-796bbdcf4f-4kvb4\" (UID: \"6424af90-f4da-452e-a0fe-0cc758f2dc7f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.014391 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.026864 5002 request.go:700] Waited for 1.910165719s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication-operator/serviceaccounts/authentication-operator/token Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.033878 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6cc7\" (UniqueName: \"kubernetes.io/projected/fc315c4e-b735-4cd3-92d2-34b505810a5d-kube-api-access-s6cc7\") pod \"apiserver-76f77b778f-6zz9b\" (UID: \"fc315c4e-b735-4cd3-92d2-34b505810a5d\") " pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.044793 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kts9\" (UniqueName: \"kubernetes.io/projected/65d93135-ae61-4e1a-be22-29f7dfee2808-kube-api-access-2kts9\") pod \"authentication-operator-69f744f599-rr89p\" (UID: \"65d93135-ae61-4e1a-be22-29f7dfee2808\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.066852 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6v2t\" (UniqueName: \"kubernetes.io/projected/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-kube-api-access-r6v2t\") pod \"oauth-openshift-558db77b4-cfssq\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:44 crc kubenswrapper[5002]: W1203 16:33:44.078951 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-93333eb80d436fdd0d31a26727edc69dd8ba3d605dffd0cbe737a2e81285d4a8 WatchSource:0}: Error finding container 93333eb80d436fdd0d31a26727edc69dd8ba3d605dffd0cbe737a2e81285d4a8: Status 404 returned error can't find the container with id 93333eb80d436fdd0d31a26727edc69dd8ba3d605dffd0cbe737a2e81285d4a8 Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.083557 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jldnw\" (UniqueName: \"kubernetes.io/projected/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-kube-api-access-jldnw\") pod \"controller-manager-879f6c89f-ln6gt\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.087926 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.100028 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.100134 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.109163 5002 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.124430 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.128704 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.133657 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.145924 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" Dec 03 16:33:44 crc kubenswrapper[5002]: W1203 16:33:44.154786 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-0c8f2054dc7c91a8fd4a3ab57b8482955b2031997c03fb362ae23ce14dc00442 WatchSource:0}: Error finding container 0c8f2054dc7c91a8fd4a3ab57b8482955b2031997c03fb362ae23ce14dc00442: Status 404 returned error can't find the container with id 0c8f2054dc7c91a8fd4a3ab57b8482955b2031997c03fb362ae23ce14dc00442 Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.155981 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.156881 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.181972 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg7qm\" (UniqueName: \"kubernetes.io/projected/392438fa-8a16-4db3-9eb2-a37287f5b558-kube-api-access-hg7qm\") pod \"cluster-samples-operator-665b6dd947-j964n\" (UID: \"392438fa-8a16-4db3-9eb2-a37287f5b558\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.186712 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbp5w\" (UniqueName: \"kubernetes.io/projected/64b9dc30-c0fb-461c-aa13-fe92db94c162-kube-api-access-vbp5w\") pod \"machine-config-controller-84d6567774-9hc4g\" (UID: \"64b9dc30-c0fb-461c-aa13-fe92db94c162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.206207 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7whdl\" (UniqueName: \"kubernetes.io/projected/892ffee2-4865-49b9-aaed-18176803dabb-kube-api-access-7whdl\") pod \"multus-admission-controller-857f4d67dd-7jwsj\" (UID: \"892ffee2-4865-49b9-aaed-18176803dabb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.226533 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f8mj\" (UniqueName: \"kubernetes.io/projected/0feb7832-783f-44ca-90d0-dd9685bf1031-kube-api-access-7f8mj\") pod \"machine-config-operator-74547568cd-vnl7s\" (UID: \"0feb7832-783f-44ca-90d0-dd9685bf1031\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.244573 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-f42gq\" (UniqueName: \"kubernetes.io/projected/2b1ff8cc-9661-4d44-8364-96c766b70087-kube-api-access-f42gq\") pod \"kube-storage-version-migrator-operator-b67b599dd-x6qbh\" (UID: \"2b1ff8cc-9661-4d44-8364-96c766b70087\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.263187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvfvb\" (UniqueName: \"kubernetes.io/projected/82c6ac8e-02c5-43cb-aa31-ec7df47f35b0-kube-api-access-xvfvb\") pod \"apiserver-7bbb656c7d-72gmf\" (UID: \"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.280863 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.286231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fccw\" (UniqueName: \"kubernetes.io/projected/6e8cf598-b803-4504-a472-49efee59fd59-kube-api-access-7fccw\") pod \"machine-api-operator-5694c8668f-8xg65\" (UID: \"6e8cf598-b803-4504-a472-49efee59fd59\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.290127 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.309071 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.309951 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx24n\" (UniqueName: \"kubernetes.io/projected/3fad564d-947c-411c-b1e7-0e5d82ebb310-kube-api-access-cx24n\") pod \"route-controller-manager-6576b87f9c-5prhq\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.315881 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.332025 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzrmz\" (UniqueName: \"kubernetes.io/projected/17833d15-ffa9-496b-8ee8-6f97fd7f976e-kube-api-access-gzrmz\") pod \"etcd-operator-b45778765-68g57\" (UID: \"17833d15-ffa9-496b-8ee8-6f97fd7f976e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.345375 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/43c67705-ae68-4ff3-a7d0-5666619f90bb-bound-sa-token\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.350959 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.359349 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.363130 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bm9m\" (UniqueName: \"kubernetes.io/projected/3ffcb996-f277-4b13-942a-ff911dcf1899-kube-api-access-8bm9m\") pod \"control-plane-machine-set-operator-78cbb6b69f-d5jnd\" (UID: \"3ffcb996-f277-4b13-942a-ff911dcf1899\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.399993 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g5fr\" (UniqueName: \"kubernetes.io/projected/5cf52b45-df6d-44b7-92bb-703ae07fb259-kube-api-access-2g5fr\") pod \"migrator-59844c95c7-2vv8k\" (UID: \"5cf52b45-df6d-44b7-92bb-703ae07fb259\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.408015 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6jrf\" (UniqueName: \"kubernetes.io/projected/43c67705-ae68-4ff3-a7d0-5666619f90bb-kube-api-access-r6jrf\") pod \"ingress-operator-5b745b69d9-r2gz4\" (UID: \"43c67705-ae68-4ff3-a7d0-5666619f90bb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.419659 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.423794 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.432083 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-68mq2"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.438544 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cfssq"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.438602 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bpkhn"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.441876 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9xf2\" (UniqueName: \"kubernetes.io/projected/306f45fb-8587-468f-8032-ea87c84f9953-kube-api-access-v9xf2\") pod \"olm-operator-6b444d44fb-9td8r\" (UID: \"306f45fb-8587-468f-8032-ea87c84f9953\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:44 crc kubenswrapper[5002]: W1203 16:33:44.444155 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f3e66a3_d515_4057_86c3_c0d956189e98.slice/crio-f7916460c607140198479a179de29a44a428cd2fe1d179275228b9483d65be6f WatchSource:0}: Error finding container f7916460c607140198479a179de29a44a428cd2fe1d179275228b9483d65be6f: Status 404 returned error can't find the container with id f7916460c607140198479a179de29a44a428cd2fe1d179275228b9483d65be6f Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.448276 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47611376-613f-44da-9154-7c8f3dfa936c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zkzzk\" (UID: \"47611376-613f-44da-9154-7c8f3dfa936c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.468607 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.469882 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.484179 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqfln\" (UniqueName: \"kubernetes.io/projected/b5cbe8f4-807c-412d-91e5-bb1a5b6c677a-kube-api-access-mqfln\") pod \"dns-operator-744455d44c-fm2l2\" (UID: \"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a\") " pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.488723 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.490423 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.504776 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.508810 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd5lc\" (UniqueName: \"kubernetes.io/projected/179eb6c2-e4c4-4e61-baed-5c5628c342c8-kube-api-access-pd5lc\") pod \"catalog-operator-68c6474976-stdlq\" (UID: \"179eb6c2-e4c4-4e61-baed-5c5628c342c8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.529907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p7h6\" (UniqueName: \"kubernetes.io/projected/1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe-kube-api-access-9p7h6\") pod \"router-default-5444994796-wtrm8\" (UID: \"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe\") " pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.543917 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8mnj\" (UniqueName: \"kubernetes.io/projected/d77e2cb1-507a-44f6-b273-d2140d626699-kube-api-access-q8mnj\") pod \"package-server-manager-789f6589d5-dqgqr\" (UID: \"d77e2cb1-507a-44f6-b273-d2140d626699\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.554016 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.560241 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.568373 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/859891e6-8a79-45cc-91af-d40414173836-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g4prj\" (UID: \"859891e6-8a79-45cc-91af-d40414173836\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.574307 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.586470 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd4br\" (UniqueName: \"kubernetes.io/projected/0b16e68a-d8fd-46cb-918c-96b83f9df4b2-kube-api-access-qd4br\") pod \"cluster-image-registry-operator-dc59b4c8b-xq8x8\" (UID: \"0b16e68a-d8fd-46cb-918c-96b83f9df4b2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.589157 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.598887 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.603098 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.610260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dbff1165-ffbe-4eea-8541-ad40311df417-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6zxln\" (UID: \"dbff1165-ffbe-4eea-8541-ad40311df417\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.652060 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.657632 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp6zj\" (UniqueName: \"kubernetes.io/projected/62df1cbe-97fd-495e-8249-0a697e526ec9-kube-api-access-wp6zj\") pod \"console-operator-58897d9998-sprqt\" (UID: \"62df1cbe-97fd-495e-8249-0a697e526ec9\") " pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.662880 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56badc5f-4e9d-4129-855f-4c2a54eb63d8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.662940 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-certificates\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663122 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663193 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-trusted-ca\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663218 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56badc5f-4e9d-4129-855f-4c2a54eb63d8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663255 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663282 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmkrv\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-kube-api-access-mmkrv\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663342 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drz44\" (UniqueName: \"kubernetes.io/projected/b1e5478b-5439-41eb-b83b-700e37123781-kube-api-access-drz44\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663378 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-bound-sa-token\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663456 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-tls\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.663490 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: E1203 16:33:44.663704 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.163687348 +0000 UTC m=+148.577509236 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.670429 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bpkhn" event={"ID":"5fa63999-6c0a-4b33-8585-ef7c04ceab79","Type":"ContainerStarted","Data":"fcb5486299c431b35d55d32873d3ed1e81a2e840342d83db4d83979319152ba6"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.670963 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.679401 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" event={"ID":"3f3e66a3-d515-4057-86c3-c0d956189e98","Type":"ContainerStarted","Data":"f7916460c607140198479a179de29a44a428cd2fe1d179275228b9483d65be6f"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.681390 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.694580 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2d8994983e2f962941d1cb4efc96677734a1e78d35c6000c65c5a8602245f41c"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.694623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0c8f2054dc7c91a8fd4a3ab57b8482955b2031997c03fb362ae23ce14dc00442"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.696909 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.706080 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.708598 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b08df5318435d02d58bb7611fc8420b9b96188c9f645c21fab95a41605a36409"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.708635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"93333eb80d436fdd0d31a26727edc69dd8ba3d605dffd0cbe737a2e81285d4a8"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.709081 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.710729 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6zz9b"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.712758 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" event={"ID":"6424af90-f4da-452e-a0fe-0cc758f2dc7f","Type":"ContainerStarted","Data":"cf31d40ee436b49d746f42bdbb85d206e8073ac3de023e4dd04d55b5f9abf65a"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.713491 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-68mq2" event={"ID":"2ff3812c-cb2a-4b07-b140-0f0b97b35e13","Type":"ContainerStarted","Data":"5f83bd7f04fdd583e1ad96690c27eb6d4efa0f6568bcdbc1348133f3e284ab41"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.718007 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.718105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" event={"ID":"756420f9-086d-4b08-8094-377c93482ca7","Type":"ContainerStarted","Data":"c33a283eaf77b80f325df6f73ba06c6f1295cb0719e3c20bdbc94ffdea024c3c"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.718171 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" event={"ID":"756420f9-086d-4b08-8094-377c93482ca7","Type":"ContainerStarted","Data":"b9f632c97c5d827d6bd692502140dcbd081e0983773f7e39ed420713b0cafee9"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.746423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" event={"ID":"aa31951c-cf30-4f7f-aaf5-f1a1109f8869","Type":"ContainerStarted","Data":"626e1cda810eb8cf831145d1919bae1ce1a49d498923d58ab48c2806823120ae"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764364 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764702 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ghsm\" (UniqueName: \"kubernetes.io/projected/076f1f3c-0e74-4226-bab7-ac95438354e4-kube-api-access-6ghsm\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764731 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-webhook-cert\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764873 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-registration-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764895 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/076f1f3c-0e74-4226-bab7-ac95438354e4-signing-cabundle\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764942 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5b31661-f06f-4465-a202-4824f903eeaa-metrics-tls\") pod \"dns-default-dvz54\" (UID: 
\"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.764964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlq6k\" (UniqueName: \"kubernetes.io/projected/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-kube-api-access-xlq6k\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.765008 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89c37c9b-914f-443e-af95-15b1283764ec-cert\") pod \"ingress-canary-vffvr\" (UID: \"89c37c9b-914f-443e-af95-15b1283764ec\") " pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.765142 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/4e2e7e04-69b4-40eb-8cad-b9964df47337-certs\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.765285 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/4e2e7e04-69b4-40eb-8cad-b9964df47337-node-bootstrap-token\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.765317 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgkwj\" (UniqueName: \"kubernetes.io/projected/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-kube-api-access-vgkwj\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.765915 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jhv9\" (UniqueName: \"kubernetes.io/projected/b5b31661-f06f-4465-a202-4824f903eeaa-kube-api-access-5jhv9\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.765948 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4a92b26-42c8-4f22-bfba-6c63140c6501-secret-volume\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766053 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-trusted-ca\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766070 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56badc5f-4e9d-4129-855f-4c2a54eb63d8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766087 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5b31661-f06f-4465-a202-4824f903eeaa-config-volume\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: E1203 16:33:44.766281 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.266257405 +0000 UTC m=+148.680079293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766278 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3d6cc5efb05c3d109c58d5d4d7d5598bb70b45adb061538b170be42293819a32"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766331 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c0f017157743861d75ff57e9ea06a07922d6d8df0f5510e53c69b8e6ac248a8f"} Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766486 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmkrv\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-kube-api-access-mmkrv\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766506 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766523 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-csi-data-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 
03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766572 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a92b26-42c8-4f22-bfba-6c63140c6501-config-volume\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.766681 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drz44\" (UniqueName: \"kubernetes.io/projected/b1e5478b-5439-41eb-b83b-700e37123781-kube-api-access-drz44\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767359 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-bound-sa-token\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kkkn\" (UniqueName: \"kubernetes.io/projected/e72f9b17-a374-4625-8505-dfdeab584c8f-kube-api-access-2kkkn\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767414 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkmhj\" (UniqueName: \"kubernetes.io/projected/4e2e7e04-69b4-40eb-8cad-b9964df47337-kube-api-access-fkmhj\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767433 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-apiservice-cert\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767463 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlbjp\" (UniqueName: \"kubernetes.io/projected/f4a92b26-42c8-4f22-bfba-6c63140c6501-kube-api-access-mlbjp\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767533 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-tls\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767550 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-mountpoint-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.767672 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-trusted-ca\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768069 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-tmpfs\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768127 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768193 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/076f1f3c-0e74-4226-bab7-ac95438354e4-signing-key\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-socket-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768295 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56badc5f-4e9d-4129-855f-4c2a54eb63d8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768317 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-serving-cert\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.768373 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77dp9\" (UniqueName: \"kubernetes.io/projected/89c37c9b-914f-443e-af95-15b1283764ec-kube-api-access-77dp9\") pod \"ingress-canary-vffvr\" 
(UID: \"89c37c9b-914f-443e-af95-15b1283764ec\") " pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.773645 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56badc5f-4e9d-4129-855f-4c2a54eb63d8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.775200 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-certificates\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.775702 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-config\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.775996 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-certificates\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.776052 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-plugins-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.782474 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ln6gt"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.784925 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.787381 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.799761 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-tls\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: 
\"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.801079 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56badc5f-4e9d-4129-855f-4c2a54eb63d8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.804455 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-bound-sa-token\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.812131 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.843594 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drz44\" (UniqueName: \"kubernetes.io/projected/b1e5478b-5439-41eb-b83b-700e37123781-kube-api-access-drz44\") pod \"marketplace-operator-79b997595-qqqkv\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") " pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.870397 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmkrv\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-kube-api-access-mmkrv\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.878967 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/4e2e7e04-69b4-40eb-8cad-b9964df47337-certs\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879020 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879038 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/4e2e7e04-69b4-40eb-8cad-b9964df47337-node-bootstrap-token\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879055 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgkwj\" (UniqueName: \"kubernetes.io/projected/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-kube-api-access-vgkwj\") pod 
\"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879069 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jhv9\" (UniqueName: \"kubernetes.io/projected/b5b31661-f06f-4465-a202-4824f903eeaa-kube-api-access-5jhv9\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879086 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4a92b26-42c8-4f22-bfba-6c63140c6501-secret-volume\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879106 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5b31661-f06f-4465-a202-4824f903eeaa-config-volume\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879127 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-csi-data-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879143 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a92b26-42c8-4f22-bfba-6c63140c6501-config-volume\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879179 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kkkn\" (UniqueName: \"kubernetes.io/projected/e72f9b17-a374-4625-8505-dfdeab584c8f-kube-api-access-2kkkn\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879198 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkmhj\" (UniqueName: \"kubernetes.io/projected/4e2e7e04-69b4-40eb-8cad-b9964df47337-kube-api-access-fkmhj\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879212 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-apiservice-cert\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879229 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlbjp\" 
(UniqueName: \"kubernetes.io/projected/f4a92b26-42c8-4f22-bfba-6c63140c6501-kube-api-access-mlbjp\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879248 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-mountpoint-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879276 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-tmpfs\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879294 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/076f1f3c-0e74-4226-bab7-ac95438354e4-signing-key\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879310 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-socket-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879331 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-serving-cert\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879349 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77dp9\" (UniqueName: \"kubernetes.io/projected/89c37c9b-914f-443e-af95-15b1283764ec-kube-api-access-77dp9\") pod \"ingress-canary-vffvr\" (UID: \"89c37c9b-914f-443e-af95-15b1283764ec\") " pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879370 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-config\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879384 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-plugins-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879400 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6ghsm\" (UniqueName: \"kubernetes.io/projected/076f1f3c-0e74-4226-bab7-ac95438354e4-kube-api-access-6ghsm\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879418 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-webhook-cert\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879438 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-registration-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879454 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/076f1f3c-0e74-4226-bab7-ac95438354e4-signing-cabundle\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879469 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5b31661-f06f-4465-a202-4824f903eeaa-metrics-tls\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879485 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlq6k\" (UniqueName: \"kubernetes.io/projected/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-kube-api-access-xlq6k\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.879501 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89c37c9b-914f-443e-af95-15b1283764ec-cert\") pod \"ingress-canary-vffvr\" (UID: \"89c37c9b-914f-443e-af95-15b1283764ec\") " pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.882921 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/4e2e7e04-69b4-40eb-8cad-b9964df47337-node-bootstrap-token\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.883163 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4a92b26-42c8-4f22-bfba-6c63140c6501-secret-volume\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: 
I1203 16:33:44.883231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-registration-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.882727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-socket-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.883867 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/076f1f3c-0e74-4226-bab7-ac95438354e4-signing-cabundle\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.884351 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5b31661-f06f-4465-a202-4824f903eeaa-config-volume\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.884445 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-csi-data-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.886169 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-config\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.886727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-mountpoint-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.887004 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b5b31661-f06f-4465-a202-4824f903eeaa-metrics-tls\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.887542 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a92b26-42c8-4f22-bfba-6c63140c6501-config-volume\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.887982 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e72f9b17-a374-4625-8505-dfdeab584c8f-plugins-dir\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.888638 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-webhook-cert\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.888990 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-tmpfs\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: E1203 16:33:44.889847 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.389826915 +0000 UTC m=+148.803648983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.901240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-serving-cert\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.903016 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.908963 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-apiservice-cert\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.912088 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/4e2e7e04-69b4-40eb-8cad-b9964df47337-certs\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.918353 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/076f1f3c-0e74-4226-bab7-ac95438354e4-signing-key\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: 
\"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.921897 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgkwj\" (UniqueName: \"kubernetes.io/projected/c6cd8f6f-3bf5-4725-a936-67049ff23b9b-kube-api-access-vgkwj\") pod \"packageserver-d55dfcdfc-xx7sk\" (UID: \"c6cd8f6f-3bf5-4725-a936-67049ff23b9b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.923142 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89c37c9b-914f-443e-af95-15b1283764ec-cert\") pod \"ingress-canary-vffvr\" (UID: \"89c37c9b-914f-443e-af95-15b1283764ec\") " pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.925549 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6"] Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.939801 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.971809 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jhv9\" (UniqueName: \"kubernetes.io/projected/b5b31661-f06f-4465-a202-4824f903eeaa-kube-api-access-5jhv9\") pod \"dns-default-dvz54\" (UID: \"b5b31661-f06f-4465-a202-4824f903eeaa\") " pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.976001 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlq6k\" (UniqueName: \"kubernetes.io/projected/c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d-kube-api-access-xlq6k\") pod \"service-ca-operator-777779d784-7ltpf\" (UID: \"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.980865 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:44 crc kubenswrapper[5002]: E1203 16:33:44.981336 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.481319205 +0000 UTC m=+148.895141093 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:44 crc kubenswrapper[5002]: I1203 16:33:44.993761 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.024223 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kkkn\" (UniqueName: \"kubernetes.io/projected/e72f9b17-a374-4625-8505-dfdeab584c8f-kube-api-access-2kkkn\") pod \"csi-hostpathplugin-778kb\" (UID: \"e72f9b17-a374-4625-8505-dfdeab584c8f\") " pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.027366 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.028671 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkmhj\" (UniqueName: \"kubernetes.io/projected/4e2e7e04-69b4-40eb-8cad-b9964df47337-kube-api-access-fkmhj\") pod \"machine-config-server-qcm4d\" (UID: \"4e2e7e04-69b4-40eb-8cad-b9964df47337\") " pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.036655 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlbjp\" (UniqueName: \"kubernetes.io/projected/f4a92b26-42c8-4f22-bfba-6c63140c6501-kube-api-access-mlbjp\") pod \"collect-profiles-29412990-d57rp\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.042912 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.047862 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.056470 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ghsm\" (UniqueName: \"kubernetes.io/projected/076f1f3c-0e74-4226-bab7-ac95438354e4-kube-api-access-6ghsm\") pod \"service-ca-9c57cc56f-jnmmj\" (UID: \"076f1f3c-0e74-4226-bab7-ac95438354e4\") " pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.064941 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dvz54" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.072822 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qcm4d" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.074051 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77dp9\" (UniqueName: \"kubernetes.io/projected/89c37c9b-914f-443e-af95-15b1283764ec-kube-api-access-77dp9\") pod \"ingress-canary-vffvr\" (UID: \"89c37c9b-914f-443e-af95-15b1283764ec\") " pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.082710 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.082809 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.582787253 +0000 UTC m=+148.996609151 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.083419 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vffvr" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.099007 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-778kb" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.184266 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.185122 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.685100863 +0000 UTC m=+149.098922751 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.241898 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8xg65"] Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.293868 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.294326 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.794309768 +0000 UTC m=+149.208131646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.357469 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.395590 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.396117 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:45.896090594 +0000 UTC m=+149.309912482 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.400613 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7jwsj"] Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.505923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.506885 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.006866531 +0000 UTC m=+149.420688419 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.550701 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rr89p"] Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.596641 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf"] Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.607569 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.608078 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.108060982 +0000 UTC m=+149.521882870 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.709501 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.709906 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.209893879 +0000 UTC m=+149.623715767 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.786923 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" event={"ID":"6e8cf598-b803-4504-a472-49efee59fd59","Type":"ContainerStarted","Data":"addb0cf6b7793c097a237a856e09d12c4b53ff221a32d2c96b6585b0395db40d"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.789485 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" event={"ID":"a5977d33-9bdf-49f2-ba26-a3e2df8c4063","Type":"ContainerStarted","Data":"4b40af3819f9fd03b59b81fa6b9bf13b547053bc69942a856ef2743c19974131"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.796170 5002 generic.go:334] "Generic (PLEG): container finished" podID="3f3e66a3-d515-4057-86c3-c0d956189e98" containerID="83c1da66fedc8276a3509da0e81cf5c659a7d8f5462680b080a5a2c363e9ce03" exitCode=0 Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.796242 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" event={"ID":"3f3e66a3-d515-4057-86c3-c0d956189e98","Type":"ContainerDied","Data":"83c1da66fedc8276a3509da0e81cf5c659a7d8f5462680b080a5a2c363e9ce03"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.798356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qcm4d" event={"ID":"4e2e7e04-69b4-40eb-8cad-b9964df47337","Type":"ContainerStarted","Data":"2c2c23b84d6e7c3510f2f62c1a2c8dcff20d98c741719813a2c2783eb14d0e8a"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.800821 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" 
event={"ID":"892ffee2-4865-49b9-aaed-18176803dabb","Type":"ContainerStarted","Data":"c7ea8826e4a634cc4070c709a994b506e53bb01047c08ec286509a4897ab1615"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.810171 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.810635 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.310616866 +0000 UTC m=+149.724438754 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.820406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wtrm8" event={"ID":"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe","Type":"ContainerStarted","Data":"3ae7706ff617a4591ff61c7d3e8720f99f565458deade7970483a4998c9feba5"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.847033 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-68mq2" event={"ID":"2ff3812c-cb2a-4b07-b140-0f0b97b35e13","Type":"ContainerStarted","Data":"1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.852075 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" event={"ID":"074f396d-24a0-47a4-836a-636ed06d95e6","Type":"ContainerStarted","Data":"38872f6be2c3d581c736511b39ec75e637e41d2bf558d86bb2e82831efb690a1"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.890030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" event={"ID":"6424af90-f4da-452e-a0fe-0cc758f2dc7f","Type":"ContainerStarted","Data":"45c1a7e7700149d1dceaa6a45eacfef83fd6c08e5a1ce35dada1b2dbecbfbeb6"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.898058 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bpkhn" event={"ID":"5fa63999-6c0a-4b33-8585-ef7c04ceab79","Type":"ContainerStarted","Data":"201d92e97e1668a1873741b4d93ba6b8d054b8e1cc2a4a75209eb315f465a9cf"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.902491 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.913334 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:45 crc kubenswrapper[5002]: E1203 16:33:45.921930 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.421914107 +0000 UTC m=+149.835735995 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.928563 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-bpkhn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.928652 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bpkhn" podUID="5fa63999-6c0a-4b33-8585-ef7c04ceab79" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.929329 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" event={"ID":"fc315c4e-b735-4cd3-92d2-34b505810a5d","Type":"ContainerStarted","Data":"8b0efb6a0a3daa8ac104abc9d63b4854a43d59e57d2708ed54d98089ae3718f7"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.957288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" event={"ID":"756420f9-086d-4b08-8094-377c93482ca7","Type":"ContainerStarted","Data":"12d002c8f8bb34a5d7807d58cc01c062b2845c001ff01e10f858430c5bc89221"} Dec 03 16:33:45 crc kubenswrapper[5002]: I1203 16:33:45.969269 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" event={"ID":"64b9dc30-c0fb-461c-aa13-fe92db94c162","Type":"ContainerStarted","Data":"f8f8a05fc45dd38af5a8e94a42cd73913e2ea99a9d72adb53eafb270359a45f3"} Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.014253 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.015389 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 16:33:46.5153723 +0000 UTC m=+149.929194188 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.116564 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.117120 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.617100745 +0000 UTC m=+150.030922633 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.217922 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.218110 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.718087409 +0000 UTC m=+150.131909307 (durationBeforeRetry 500ms). 
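
[Annotation] The pair of messages above repeats throughout this window: reconciler_common.go starts an unmount (or mount) for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8, and nestedpendingoperations.go immediately fails it and schedules the next attempt 500ms out, because the hostpath-provisioner CSI driver has not yet registered with the kubelet. Below is a minimal sketch of that gating pattern only; the names (retryGate, run) are illustrative, not kubelet identifiers, and kubelet's real nestedpendingoperations uses exponential backoff, while this excerpt happens to show a constant 500ms window.

package main

import (
	"errors"
	"fmt"
	"time"
)

// retryGate sketches the per-volume gating visible above: after a failure,
// the same operation is refused until lastFailure+backoff ("No retries
// permitted until ..."), then allowed to run again.
type retryGate struct {
	backoff time.Duration // the durationBeforeRetry printed in the log
	next    time.Time     // earliest time the next attempt may run
}

// run executes op unless the gate is still closed from the last failure.
func (g *retryGate) run(op func() error) error {
	if time.Now().Before(g.next) {
		return fmt.Errorf("no retries permitted until %s", g.next.Format(time.RFC3339Nano))
	}
	if err := op(); err != nil {
		g.next = time.Now().Add(g.backoff)
		return err
	}
	return nil
}

func main() {
	gate := &retryGate{backoff: 500 * time.Millisecond}
	mountDevice := func() error {
		// The same failure the kubelet reports above.
		return errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
	}
	for i := 0; i < 4; i++ {
		// Attempts that land inside the 500ms window are refused by the gate.
		fmt.Println(gate.run(mountDevice))
		time.Sleep(300 * time.Millisecond)
	}
}
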
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.218525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.218844 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.71883668 +0000 UTC m=+150.132658568 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.320100 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.320476 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.820450201 +0000 UTC m=+150.234272089 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.320667 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.320978 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.820970996 +0000 UTC m=+150.234792884 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.421981 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.422310 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:46.922294469 +0000 UTC m=+150.336116347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.523711 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.524314 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.024293291 +0000 UTC m=+150.438115179 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.625291 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.625511 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.125479861 +0000 UTC m=+150.539301749 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.625713 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.626086 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.126077617 +0000 UTC m=+150.539899505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.626324 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pzhk5" podStartSLOduration=125.626292153 podStartE2EDuration="2m5.626292153s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:46.625802859 +0000 UTC m=+150.039624777" watchObservedRunningTime="2025-12-03 16:33:46.626292153 +0000 UTC m=+150.040114041" Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.716937 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4kvb4" podStartSLOduration=125.716908559 podStartE2EDuration="2m5.716908559s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:46.713795996 +0000 UTC m=+150.127617904" watchObservedRunningTime="2025-12-03 16:33:46.716908559 +0000 UTC m=+150.130730447" Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.731535 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.732415 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.231707966 +0000 UTC m=+150.645530174 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.732569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.732991 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.23297933 +0000 UTC m=+150.646801218 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.776056 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-68mq2" podStartSLOduration=124.776031583 podStartE2EDuration="2m4.776031583s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:46.76808957 +0000 UTC m=+150.181911468" watchObservedRunningTime="2025-12-03 16:33:46.776031583 +0000 UTC m=+150.189853471" Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.842976 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.843508 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.34348239 +0000 UTC m=+150.757304278 (durationBeforeRetry 500ms). 
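
[Annotation] For readers cross-checking the pod_startup_latency_tracker entries: podStartSLOduration is simply watchObservedRunningTime minus podCreationTimestamp. The short sketch below verifies the machine-approver figure reported above (125.626292153s); the layout string is the only assumption, and its .999999999 fragment makes fractional seconds optional so the one layout parses both timestamp forms in these lines.

package main

import (
	"fmt"
	"time"
)

// Layout matching the timestamps printed by the tracker lines above.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	// Values copied from the machine-approver-56656f9798-pzhk5 entry.
	created, err := time.Parse(layout, "2025-12-03 16:31:41 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-12-03 16:33:46.626292153 +0000 UTC")
	if err != nil {
		panic(err)
	}
	d := observed.Sub(created)
	fmt.Println(d, d.Seconds()) // 2m5.626292153s 125.626292153 == podStartSLOduration
}
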
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.919366 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-bpkhn" podStartSLOduration=124.919340821 podStartE2EDuration="2m4.919340821s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:46.807152967 +0000 UTC m=+150.220974855" watchObservedRunningTime="2025-12-03 16:33:46.919340821 +0000 UTC m=+150.333162709" Dec 03 16:33:46 crc kubenswrapper[5002]: I1203 16:33:46.945898 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:46 crc kubenswrapper[5002]: E1203 16:33:46.983598 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.483570242 +0000 UTC m=+150.897392140 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.005882 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qcm4d" event={"ID":"4e2e7e04-69b4-40eb-8cad-b9964df47337","Type":"ContainerStarted","Data":"04dfa0eddba48a09d0dc80df1c7281e5918374c834107d9512b9ea3bd6f9ec54"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.018890 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" event={"ID":"892ffee2-4865-49b9-aaed-18176803dabb","Type":"ContainerStarted","Data":"2b45e1f65d7f2923558f2693ea8b0c96f55144ea1136d8953aae47ee6492bab7"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.018951 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" event={"ID":"892ffee2-4865-49b9-aaed-18176803dabb","Type":"ContainerStarted","Data":"a38e91f57995d38b62c17ec7ea26c3befa8d1c18411b87ff7039516da669a81b"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.045597 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc315c4e-b735-4cd3-92d2-34b505810a5d" containerID="a24210ab00afb203e8dd603308bc2c8d22cd8d958dc551517ecef3df3a153272" exitCode=0 Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.045670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" event={"ID":"fc315c4e-b735-4cd3-92d2-34b505810a5d","Type":"ContainerDied","Data":"a24210ab00afb203e8dd603308bc2c8d22cd8d958dc551517ecef3df3a153272"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.048921 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.050329 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.55031416 +0000 UTC m=+150.964136038 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.084401 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-7jwsj" podStartSLOduration=125.084382062 podStartE2EDuration="2m5.084382062s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.082217843 +0000 UTC m=+150.496039731" watchObservedRunningTime="2025-12-03 16:33:47.084382062 +0000 UTC m=+150.498203950" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.094336 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" event={"ID":"64b9dc30-c0fb-461c-aa13-fe92db94c162","Type":"ContainerStarted","Data":"1398ff369176057f420fe8ed893f98332b85b1eb562c192f6e16e9bd79ad53dc"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.094376 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" event={"ID":"64b9dc30-c0fb-461c-aa13-fe92db94c162","Type":"ContainerStarted","Data":"f3a3ea63d37109fe8c346bb8715d2752f518868cf3b001ec765cb9ac978bbd0f"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.106019 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.116558 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.118466 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-qcm4d" podStartSLOduration=6.118443473 podStartE2EDuration="6.118443473s" podCreationTimestamp="2025-12-03 16:33:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.110789909 +0000 UTC m=+150.524611797" watchObservedRunningTime="2025-12-03 16:33:47.118443473 +0000 UTC m=+150.532265361" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.135800 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" event={"ID":"6e8cf598-b803-4504-a472-49efee59fd59","Type":"ContainerStarted","Data":"22273419425a063f128e51caedf67c1e4a843a92979fe16808f9454111d15d13"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.135856 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" event={"ID":"6e8cf598-b803-4504-a472-49efee59fd59","Type":"ContainerStarted","Data":"f427edf341fc3e7646e2a01ca1b51a9a827c0550b31656a40aafbc1dfd5b82a2"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.150569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.153183 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.653165154 +0000 UTC m=+151.066987042 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.159821 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"] Dec 03 16:33:47 crc kubenswrapper[5002]: W1203 16:33:47.162166 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0feb7832_783f_44ca_90d0_dd9685bf1031.slice/crio-c768620e4493ebe84fc19498a87ec94ebbf3ad437e44c2b0bdf5d2a6fd9c65c3 WatchSource:0}: Error finding container c768620e4493ebe84fc19498a87ec94ebbf3ad437e44c2b0bdf5d2a6fd9c65c3: Status 404 returned error can't find the container with id c768620e4493ebe84fc19498a87ec94ebbf3ad437e44c2b0bdf5d2a6fd9c65c3 Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.175144 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" event={"ID":"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0","Type":"ContainerStarted","Data":"84cc63489861c2843cf7fc751bf54eb3e7299b7ea6947963e2221384f742b88d"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.202918 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" event={"ID":"3f3e66a3-d515-4057-86c3-c0d956189e98","Type":"ContainerStarted","Data":"d52f9a038a7806a8bf885a1370f9ac3c58f508a2adc0f32f7395dff4ccd67388"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.203479 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.225229 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-8xg65" podStartSLOduration=125.225212324 podStartE2EDuration="2m5.225212324s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.223279361 +0000 UTC m=+150.637101249" watchObservedRunningTime="2025-12-03 16:33:47.225212324 +0000 UTC m=+150.639034212" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.226721 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" 
event={"ID":"aa31951c-cf30-4f7f-aaf5-f1a1109f8869","Type":"ContainerStarted","Data":"2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.230272 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.237826 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" event={"ID":"a5977d33-9bdf-49f2-ba26-a3e2df8c4063","Type":"ContainerStarted","Data":"ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.238813 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.253311 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.254543 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.754524518 +0000 UTC m=+151.168346406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.255934 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.256096 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" event={"ID":"074f396d-24a0-47a4-836a-636ed06d95e6","Type":"ContainerStarted","Data":"5fb0b8139a7368868c4b293f14fc742fce0d0562c21fb0df1990e552f5e6d793"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.263504 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.265980 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9hc4g" podStartSLOduration=125.265963325 podStartE2EDuration="2m5.265963325s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.262685547 +0000 UTC m=+150.676507435" watchObservedRunningTime="2025-12-03 16:33:47.265963325 +0000 UTC m=+150.679785203" Dec 03 16:33:47 crc 
kubenswrapper[5002]: I1203 16:33:47.302328 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" event={"ID":"65d93135-ae61-4e1a-be22-29f7dfee2808","Type":"ContainerStarted","Data":"f165aa027bfea691135124c65b39a2c62dd0c287294d93f30e540da2e9e4baa6"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.304205 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx" podStartSLOduration=125.304195449 podStartE2EDuration="2m5.304195449s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.303467909 +0000 UTC m=+150.717289807" watchObservedRunningTime="2025-12-03 16:33:47.304195449 +0000 UTC m=+150.718017337" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.326993 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wtrm8" event={"ID":"1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe","Type":"ContainerStarted","Data":"2101bcb47745d080a9cb892737908362235796c5fdcd291861a057d1b6eaa9eb"} Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.327702 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-bpkhn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.327727 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bpkhn" podUID="5fa63999-6c0a-4b33-8585-ef7c04ceab79" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.340956 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.359077 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.362513 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.862496981 +0000 UTC m=+151.276318869 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.393626 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-s57v6" podStartSLOduration=125.393599634 podStartE2EDuration="2m5.393599634s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.358201765 +0000 UTC m=+150.772023653" watchObservedRunningTime="2025-12-03 16:33:47.393599634 +0000 UTC m=+150.807421522" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.437243 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" podStartSLOduration=125.437224192 podStartE2EDuration="2m5.437224192s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.401772803 +0000 UTC m=+150.815594691" watchObservedRunningTime="2025-12-03 16:33:47.437224192 +0000 UTC m=+150.851046070" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.440916 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.440961 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.460899 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.462428 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:47.962403306 +0000 UTC m=+151.376225194 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.506515 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-fm2l2"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.562593 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.563405 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" podStartSLOduration=126.56337693 podStartE2EDuration="2m6.56337693s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.506279411 +0000 UTC m=+150.920101319" watchObservedRunningTime="2025-12-03 16:33:47.56337693 +0000 UTC m=+150.977198818" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.577568 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.578465 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.078450154 +0000 UTC m=+151.492272042 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.585824 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.586288 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:47 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:47 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:47 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.586342 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:47 crc kubenswrapper[5002]: W1203 16:33:47.587397 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43c67705_ae68_4ff3_a7d0_5666619f90bb.slice/crio-014285f6f10c8f44a99c5fdb5181b04abe6f94838d2b64d634760a71bf10f08e WatchSource:0}: Error finding container 014285f6f10c8f44a99c5fdb5181b04abe6f94838d2b64d634760a71bf10f08e: Status 404 returned error can't find the container with id 014285f6f10c8f44a99c5fdb5181b04abe6f94838d2b64d634760a71bf10f08e Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.588185 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.679280 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.679811 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.179794418 +0000 UTC m=+151.593616306 (durationBeforeRetry 500ms). 
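
[Annotation] Two kinds of probe failures interleave with the volume errors here: the downloads pod refuses TCP connections outright ("connect: connection refused"), while the router answers its startup probe with a 500 whose body itemizes failing healthz checks ([-]backend-http, [-]has-synced). The sketch below shows the shape of such an HTTP GET probe, assuming only the URL taken from the log; it illustrates the semantics these entries imply (2xx/3xx passes, anything else or a dial error fails, and the start of the body is kept for the log), not kubelet's actual prober code.

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// probe performs one HTTP GET readiness/startup check.
func probe(url string) (ok bool, detail string) {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "Get \"http://10.217.0.9:8080/\": dial tcp ...: connect: connection refused"
		return false, err.Error()
	}
	defer resp.Body.Close()
	// Keep only the start of the body, like the "start-of-body=" field above.
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return false, fmt.Sprintf("HTTP probe failed with statuscode: %d\n%s", resp.StatusCode, body)
	}
	return true, string(body)
}

func main() {
	// Address taken from the downloads pod's probe entries in this log.
	ok, detail := probe("http://10.217.0.9:8080/")
	fmt.Println(ok, detail)
}
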
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: W1203 16:33:47.745534 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5cbe8f4_807c_412d_91e5_bb1a5b6c677a.slice/crio-fc06869e0720b05cfd6a6b5b29299d6a7dfec2e3b275080c617538791ada6022 WatchSource:0}: Error finding container fc06869e0720b05cfd6a6b5b29299d6a7dfec2e3b275080c617538791ada6022: Status 404 returned error can't find the container with id fc06869e0720b05cfd6a6b5b29299d6a7dfec2e3b275080c617538791ada6022 Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.747724 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-wtrm8" podStartSLOduration=125.747710858 podStartE2EDuration="2m5.747710858s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.745231971 +0000 UTC m=+151.159053859" watchObservedRunningTime="2025-12-03 16:33:47.747710858 +0000 UTC m=+151.161532746" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.779129 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.780590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.781032 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.281017589 +0000 UTC m=+151.694839467 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.810514 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" podStartSLOduration=126.810491909 podStartE2EDuration="2m6.810491909s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:47.808529616 +0000 UTC m=+151.222351504" watchObservedRunningTime="2025-12-03 16:33:47.810491909 +0000 UTC m=+151.224313797" Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.843901 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sprqt"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.851932 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.883659 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.884505 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.884827 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.38480375 +0000 UTC m=+151.798625638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.890139 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.894348 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-68g57"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.902246 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.902297 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.916761 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-778kb"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.932281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dvz54"] Dec 03 16:33:47 crc kubenswrapper[5002]: W1203 16:33:47.934116 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbff1165_ffbe_4eea_8541_ad40311df417.slice/crio-4a7bcd7008d9df94409adacd1e05b377d89f548d3e9c3cb283906c24bfe48c2c WatchSource:0}: Error finding container 4a7bcd7008d9df94409adacd1e05b377d89f548d3e9c3cb283906c24bfe48c2c: Status 404 returned error can't find the container with id 4a7bcd7008d9df94409adacd1e05b377d89f548d3e9c3cb283906c24bfe48c2c Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.944367 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.950454 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qqqkv"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.952075 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vffvr"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.964661 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp"] Dec 03 16:33:47 crc kubenswrapper[5002]: I1203 16:33:47.988728 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:47 crc kubenswrapper[5002]: E1203 16:33:47.989035 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
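
[Annotation] The SyncLoop UPDATE for hostpath-provisioner/csi-hostpathplugin-778kb a few entries up is the fix for the recurring mount failures arriving: once that plugin pod runs and registers kubevirt.io.hostpath-provisioner with the kubelet, the driver also becomes visible in the node's CSINode object, which offers a convenient external view of the registration state the "not found in the list of registered CSI drivers" errors are consulting. A hedged client-go sketch for checking it from outside; the kubeconfig path and the node name "crc" (the log hostname) are the assumptions.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumes a kubeconfig at the default location (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// The CSINode object for this node lists every registered CSI driver.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		// Expect kubevirt.io.hostpath-provisioner here once registration completes.
		fmt.Println(d.Name)
	}
}
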
No retries permitted until 2025-12-03 16:33:48.48902338 +0000 UTC m=+151.902845268 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.003282 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r"] Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.034769 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-jnmmj"] Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.102727 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.103091 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.603075175 +0000 UTC m=+152.016897063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:48 crc kubenswrapper[5002]: W1203 16:33:48.140909 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1e5478b_5439_41eb_b83b_700e37123781.slice/crio-c4541bed5b4eb57bc8d45d184c28461b45c8af21b22e4cb6161037f26c108309 WatchSource:0}: Error finding container c4541bed5b4eb57bc8d45d184c28461b45c8af21b22e4cb6161037f26c108309: Status 404 returned error can't find the container with id c4541bed5b4eb57bc8d45d184c28461b45c8af21b22e4cb6161037f26c108309 Dec 03 16:33:48 crc kubenswrapper[5002]: W1203 16:33:48.163205 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6cd8f6f_3bf5_4725_a936_67049ff23b9b.slice/crio-b5142252e923938507caaf1dc6c27d1c8a31fdeb8f1887242fab4eb7c143e467 WatchSource:0}: Error finding container b5142252e923938507caaf1dc6c27d1c8a31fdeb8f1887242fab4eb7c143e467: Status 404 returned error can't find the container with id b5142252e923938507caaf1dc6c27d1c8a31fdeb8f1887242fab4eb7c143e467 Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.204023 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") 
pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.204353 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.704342317 +0000 UTC m=+152.118164205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.323414 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.324329 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.8243131 +0000 UTC m=+152.238134988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.425870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.426917 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:48.926896908 +0000 UTC m=+152.340718796 (durationBeforeRetry 500ms). 
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.427622 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sprqt" event={"ID":"62df1cbe-97fd-495e-8249-0a697e526ec9","Type":"ContainerStarted","Data":"520a339ece83a4c4da3bef0860d6fc61da2c0c194f503b7252ad2fb3f4b8587b"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.439956 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" event={"ID":"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d","Type":"ContainerStarted","Data":"78c7b982bf0ae05b7f579038b4afc96a7cc7735feeea93f298b57f5ef3f9639a"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.442992 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" event={"ID":"3ffcb996-f277-4b13-942a-ff911dcf1899","Type":"ContainerStarted","Data":"b4124a142fcad973cc82ce2063db7067e2bf7fb5713c2665137cc0fea973a5bf"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.447465 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" event={"ID":"306f45fb-8587-468f-8032-ea87c84f9953","Type":"ContainerStarted","Data":"230b52bbd8fa80ab7b172c99100fa5dadabc9a7f7233a1a4fec797646f6eda9a"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.450162 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" event={"ID":"d77e2cb1-507a-44f6-b273-d2140d626699","Type":"ContainerStarted","Data":"324606b4ddaa5ea00f366a4fb7472404fbb6e266a381f66be305a157c45c0823"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.494830 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" event={"ID":"179eb6c2-e4c4-4e61-baed-5c5628c342c8","Type":"ContainerStarted","Data":"091f6907f9892b0c28b18b26d453f6bb86f8c70494acd0439a3a63c32f71c475"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.522095 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" event={"ID":"c6cd8f6f-3bf5-4725-a936-67049ff23b9b","Type":"ContainerStarted","Data":"b5142252e923938507caaf1dc6c27d1c8a31fdeb8f1887242fab4eb7c143e467"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.524195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" event={"ID":"859891e6-8a79-45cc-91af-d40414173836","Type":"ContainerStarted","Data":"b5a32179055fb5a77e901122a74c085c9584820b0dbc1ce5fd1420ab518303a9"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.527378 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.527862 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.027845782 +0000 UTC m=+152.441667670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.544987 5002 generic.go:334] "Generic (PLEG): container finished" podID="82c6ac8e-02c5-43cb-aa31-ec7df47f35b0" containerID="29e9df2c85ec18525cb72c380b1c6b6d2f1b2436d808fd64be22d3349a7566c1" exitCode=0
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.545082 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" event={"ID":"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0","Type":"ContainerDied","Data":"29e9df2c85ec18525cb72c380b1c6b6d2f1b2436d808fd64be22d3349a7566c1"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.560674 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dvz54" event={"ID":"b5b31661-f06f-4465-a202-4824f903eeaa","Type":"ContainerStarted","Data":"0d021c05499d7e520bbe4186b2957709a18fcfc59ee04d4b2106525c562f1c82"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.565910 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 16:33:48 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld
Dec 03 16:33:48 crc kubenswrapper[5002]: [+]process-running ok
Dec 03 16:33:48 crc kubenswrapper[5002]: healthz check failed
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.566472 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.630965 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.631578 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.131552409 +0000 UTC m=+152.545374297 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.688010 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vffvr" event={"ID":"89c37c9b-914f-443e-af95-15b1283764ec","Type":"ContainerStarted","Data":"6e97dd42eb7a75a7918c2dabc6b7cbcc1dd2d11c4e3c26626b5c84dd9130e12d"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.702363 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" event={"ID":"fc315c4e-b735-4cd3-92d2-34b505810a5d","Type":"ContainerStarted","Data":"4aed834aca1d80d3745695cc9dbae544495637dd036af85c9d8a364410f78bfa"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.707994 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" event={"ID":"392438fa-8a16-4db3-9eb2-a37287f5b558","Type":"ContainerStarted","Data":"09db12b098d27fbcc3c49f9b18dc6c8582cf04f6a5203e2d555aee626bc6ca79"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.733806 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.734133 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.234118106 +0000 UTC m=+152.647939994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.734512 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" event={"ID":"2b1ff8cc-9661-4d44-8364-96c766b70087","Type":"ContainerStarted","Data":"6d21376725fd10df64efa10d902f35a3d6723eef61f02644adf66a95dcaeadb8"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.769874 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" podStartSLOduration=126.769853843 podStartE2EDuration="2m6.769853843s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:48.764573002 +0000 UTC m=+152.178394910" watchObservedRunningTime="2025-12-03 16:33:48.769853843 +0000 UTC m=+152.183675731"
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.830235 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" event={"ID":"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a","Type":"ContainerStarted","Data":"fc06869e0720b05cfd6a6b5b29299d6a7dfec2e3b275080c617538791ada6022"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.848913 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:48 crc kubenswrapper[5002]: E1203 16:33:48.850691 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.350670637 +0000 UTC m=+152.764492525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.860550 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" event={"ID":"3fad564d-947c-411c-b1e7-0e5d82ebb310","Type":"ContainerStarted","Data":"79e34a89a195a61ffac6c9c48ea12a628d151f7eea86e8230c776624561c9e04"}
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.861356 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.887311 5002 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5prhq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Dec 03 16:33:48 crc kubenswrapper[5002]: I1203 16:33:48.887391 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.054816 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" event={"ID":"0b16e68a-d8fd-46cb-918c-96b83f9df4b2","Type":"ContainerStarted","Data":"74dda7375641e5839ceef7b7c4c4f8ea30059d52ffcfaa2ff7a7815a4f534bc5"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.059346 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.087264 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.587226693 +0000 UTC m=+153.001048581 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.122798 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" event={"ID":"f4a92b26-42c8-4f22-bfba-6c63140c6501","Type":"ContainerStarted","Data":"7276fc0d926873a168dd78eb2e24cda4eea565781e6725ba384a2fc6b4f6389f"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.129085 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" event={"ID":"47611376-613f-44da-9154-7c8f3dfa936c","Type":"ContainerStarted","Data":"7ae29310185fc45d69702892e6e9a91c3c60ba2ac21b354204b59598c6527754"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.158480 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" event={"ID":"43c67705-ae68-4ff3-a7d0-5666619f90bb","Type":"ContainerStarted","Data":"014285f6f10c8f44a99c5fdb5181b04abe6f94838d2b64d634760a71bf10f08e"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.161963 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.162407 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.662379406 +0000 UTC m=+153.076201284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.178936 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" event={"ID":"5cf52b45-df6d-44b7-92bb-703ae07fb259","Type":"ContainerStarted","Data":"345ce683a43a7bf93d20c271ffe8ecb515ac7bbda25998434beebbd719445932"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.196161 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rr89p" event={"ID":"65d93135-ae61-4e1a-be22-29f7dfee2808","Type":"ContainerStarted","Data":"603cef6e24995a3e2c40b45ac7d650b1c7f0a656ff9efc6307768a2a49b99a61"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.199951 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" event={"ID":"dbff1165-ffbe-4eea-8541-ad40311df417","Type":"ContainerStarted","Data":"4a7bcd7008d9df94409adacd1e05b377d89f548d3e9c3cb283906c24bfe48c2c"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.215340 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" event={"ID":"076f1f3c-0e74-4226-bab7-ac95438354e4","Type":"ContainerStarted","Data":"56f9ecde6bd17a484d5abd875c96da2973fd74866fa30d07d87c6b96fd1f2e62"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.216104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" event={"ID":"b1e5478b-5439-41eb-b83b-700e37123781","Type":"ContainerStarted","Data":"c4541bed5b4eb57bc8d45d184c28461b45c8af21b22e4cb6161037f26c108309"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.218198 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" event={"ID":"0feb7832-783f-44ca-90d0-dd9685bf1031","Type":"ContainerStarted","Data":"cba24481fce6e2a0869b002c7fb0d6101757dc385dba3d189e90dc1ec02e1b30"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.218217 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" event={"ID":"0feb7832-783f-44ca-90d0-dd9685bf1031","Type":"ContainerStarted","Data":"c768620e4493ebe84fc19498a87ec94ebbf3ad437e44c2b0bdf5d2a6fd9c65c3"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.222498 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" event={"ID":"17833d15-ffa9-496b-8ee8-6f97fd7f976e","Type":"ContainerStarted","Data":"987b40f631f5c8a44b8c688d1d4d686869eed2fe9d2d3b52d637cfea48ed63a0"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.225451 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778kb" event={"ID":"e72f9b17-a374-4625-8505-dfdeab584c8f","Type":"ContainerStarted","Data":"dc219f4fbd30efe729dccd84aec29c9ca5498ebe541ddeb0af2c584cd3fc3a3f"}
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.263048 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.263204 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.763180876 +0000 UTC m=+153.177002764 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.263462 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.263901 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.763892915 +0000 UTC m=+153.177714803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.282633 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hrqfx"
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.340844 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" podStartSLOduration=127.340826036 podStartE2EDuration="2m7.340826036s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:49.34026137 +0000 UTC m=+152.754083258" watchObservedRunningTime="2025-12-03 16:33:49.340826036 +0000 UTC m=+152.754647924"
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.341575 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" podStartSLOduration=127.341570695 podStartE2EDuration="2m7.341570695s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:49.095776832 +0000 UTC m=+152.509598720" watchObservedRunningTime="2025-12-03 16:33:49.341570695 +0000 UTC m=+152.755392583"
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.375400 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.379126 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.879102591 +0000 UTC m=+153.292924479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.481597 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.482022 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:49.982007916 +0000 UTC m=+153.395829804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.581882 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 16:33:49 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld
Dec 03 16:33:49 crc kubenswrapper[5002]: [+]process-running ok
Dec 03 16:33:49 crc kubenswrapper[5002]: healthz check failed
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.581925 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.582212 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.583345 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.08332036 +0000 UTC m=+153.497142258 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.685797 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.686128 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.186115743 +0000 UTC m=+153.599937631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.787647 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.788045 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.287994392 +0000 UTC m=+153.701816290 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:49 crc kubenswrapper[5002]: I1203 16:33:49.788686 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:49 crc kubenswrapper[5002]: E1203 16:33:49.789261 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.289250495 +0000 UTC m=+153.703072383 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:49.959834 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:49.960085 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.46005501 +0000 UTC m=+153.873876898 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:49.960187 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:49.960524 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.460510082 +0000 UTC m=+153.874331960 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.069925 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.070249 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.57023029 +0000 UTC m=+153.984052178 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.206072 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.206552 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.706537831 +0000 UTC m=+154.120359719 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.282990 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" event={"ID":"82c6ac8e-02c5-43cb-aa31-ec7df47f35b0","Type":"ContainerStarted","Data":"d8e6c4adc7b5a01ffb5ee248e041ae14ce8d3f60432bf8db323d67fa6e3e3581"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.299105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-x6qbh" event={"ID":"2b1ff8cc-9661-4d44-8364-96c766b70087","Type":"ContainerStarted","Data":"41b0fed8cb2e29e0521a1be8f29798949825af69806af86d17b1ece7a398ece2"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.310195 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.310520 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.810504206 +0000 UTC m=+154.224326094 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.330041 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" event={"ID":"c6cd8f6f-3bf5-4725-a936-67049ff23b9b","Type":"ContainerStarted","Data":"e4b79e7b562394a1ce17c3b074e4b12bf3619959b60afc4d117dfc5318ec54d8"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.331640 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.375168 5002 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xx7sk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body=
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.375241 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" podUID="c6cd8f6f-3bf5-4725-a936-67049ff23b9b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.377417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" event={"ID":"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a","Type":"ContainerStarted","Data":"ad9e273975f4cf3e516869abf47826be6c8c1d3abba1f07303ffa7e66d379181"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.401215 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" event={"ID":"179eb6c2-e4c4-4e61-baed-5c5628c342c8","Type":"ContainerStarted","Data":"29e4bf3a7202637a1444f4a4b2981f3c59a40e146b972d961057900d838ff5d2"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.402257 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.407293 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" event={"ID":"3fad564d-947c-411c-b1e7-0e5d82ebb310","Type":"ContainerStarted","Data":"190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.408410 5002 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5prhq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.408440 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.409816 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" event={"ID":"3ffcb996-f277-4b13-942a-ff911dcf1899","Type":"ContainerStarted","Data":"711a96dfb6ad6468a50b0c2f49942f148db9d1c9cf25d3048f2e411f59494646"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.411090 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.413070 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.913055872 +0000 UTC m=+154.326877760 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.415881 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" event={"ID":"f4a92b26-42c8-4f22-bfba-6c63140c6501","Type":"ContainerStarted","Data":"8a381b9d3db8b02b2ce9ab7c166d60f557510bb96325d5894c8a5ede1ec5ae4c"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.457835 5002 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-stdlq container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.457896 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" podUID="179eb6c2-e4c4-4e61-baed-5c5628c342c8" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.466324 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" podStartSLOduration=128.466301628 podStartE2EDuration="2m8.466301628s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.401475652 +0000 UTC m=+153.815297540" watchObservedRunningTime="2025-12-03 16:33:50.466301628 +0000 UTC m=+153.880123516"
watchObservedRunningTime="2025-12-03 16:33:50.466301628 +0000 UTC m=+153.880123516" Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.467352 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" podStartSLOduration=128.467342556 podStartE2EDuration="2m8.467342556s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.464519211 +0000 UTC m=+153.878341099" watchObservedRunningTime="2025-12-03 16:33:50.467342556 +0000 UTC m=+153.881164444" Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.472019 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" event={"ID":"47611376-613f-44da-9154-7c8f3dfa936c","Type":"ContainerStarted","Data":"c0cbde0eb7095ce3bd30f60e9df29c7823ec37739b3cbfc5497449a44bc0ac05"} Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.507709 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sprqt" event={"ID":"62df1cbe-97fd-495e-8249-0a697e526ec9","Type":"ContainerStarted","Data":"0d2ba5fb9ca23dc9a0128891b76eee9c5bf27f63a4c5bff34b710c54326ec323"} Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.509309 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.509384 5002 patch_prober.go:28] interesting pod/console-operator-58897d9998-sprqt container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.509414 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-sprqt" podUID="62df1cbe-97fd-495e-8249-0a697e526ec9" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.513338 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.515091 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.015071685 +0000 UTC m=+154.428893573 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.560970 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" event={"ID":"392438fa-8a16-4db3-9eb2-a37287f5b558","Type":"ContainerStarted","Data":"56044230c8d610b786ceded431031155688a83151f76f155d291eadb9f914159"} Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.574118 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:50 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:50 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:50 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.574177 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.589150 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" event={"ID":"43c67705-ae68-4ff3-a7d0-5666619f90bb","Type":"ContainerStarted","Data":"4de92bf2a087c98a978ffd573d0305d162cfa2077fa109cb1075deafe942f132"} Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.622008 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.623693 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" event={"ID":"076f1f3c-0e74-4226-bab7-ac95438354e4","Type":"ContainerStarted","Data":"4983030604c4c4b50aa66689e190fedcadfc12cf78d8de753a22fe2b53a869aa"} Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.624219 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.124173927 +0000 UTC m=+154.537995815 (durationBeforeRetry 500ms). 
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.638582 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" podStartSLOduration=128.638563472 podStartE2EDuration="2m8.638563472s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.636979569 +0000 UTC m=+154.050801467" watchObservedRunningTime="2025-12-03 16:33:50.638563472 +0000 UTC m=+154.052385360"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.641178 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" event={"ID":"5cf52b45-df6d-44b7-92bb-703ae07fb259","Type":"ContainerStarted","Data":"e5b487949c21a7047d23f4266aab29a47982a9c83c598dc8545b8df57649983e"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.647694 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" event={"ID":"d77e2cb1-507a-44f6-b273-d2140d626699","Type":"ContainerStarted","Data":"506cc9682182d6095a9ced8d47c7574efcc24357a0304efd2c22030eb45bc1b6"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.652890 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" event={"ID":"dbff1165-ffbe-4eea-8541-ad40311df417","Type":"ContainerStarted","Data":"02dba16b0f0d735d4dd1e0a1270b61919db66fc19543e7fc71a010625812c968"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.669377 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vnl7s" event={"ID":"0feb7832-783f-44ca-90d0-dd9685bf1031","Type":"ContainerStarted","Data":"8845be5dbf4074a2b68e8fcf84e5aafdb80cd7cdd30df20caf1c4bd6e669a964"}
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.724639 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.724719 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.224704429 +0000 UTC m=+154.638526317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.726798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.732311 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-sprqt" podStartSLOduration=128.732294092 podStartE2EDuration="2m8.732294092s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.730406592 +0000 UTC m=+154.144228480" watchObservedRunningTime="2025-12-03 16:33:50.732294092 +0000 UTC m=+154.146115980"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.732664 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq" podStartSLOduration=128.732658133 podStartE2EDuration="2m8.732658133s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.694434038 +0000 UTC m=+154.108255926" watchObservedRunningTime="2025-12-03 16:33:50.732658133 +0000 UTC m=+154.146480021"
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.734996 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.234985464 +0000 UTC m=+154.648807352 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.791062 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" podStartSLOduration=128.791039386 podStartE2EDuration="2m8.791039386s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.791032775 +0000 UTC m=+154.204854663" watchObservedRunningTime="2025-12-03 16:33:50.791039386 +0000 UTC m=+154.204861274"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.792339 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkzzk" podStartSLOduration=128.7923297 podStartE2EDuration="2m8.7923297s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.757488467 +0000 UTC m=+154.171310355" watchObservedRunningTime="2025-12-03 16:33:50.7923297 +0000 UTC m=+154.206151588"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.811721 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6zxln" podStartSLOduration=128.811696429 podStartE2EDuration="2m8.811696429s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.80984203 +0000 UTC m=+154.223663948" watchObservedRunningTime="2025-12-03 16:33:50.811696429 +0000 UTC m=+154.225518317"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.827663 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.827918 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.327882753 +0000 UTC m=+154.741704641 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.828196 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.828542 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.32852953 +0000 UTC m=+154.742351418 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.837389 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" podStartSLOduration=128.837371836 podStartE2EDuration="2m8.837371836s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.834967543 +0000 UTC m=+154.248789441" watchObservedRunningTime="2025-12-03 16:33:50.837371836 +0000 UTC m=+154.251193724"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.871985 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-jnmmj" podStartSLOduration=128.871968233 podStartE2EDuration="2m8.871968233s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.869145588 +0000 UTC m=+154.282967476" watchObservedRunningTime="2025-12-03 16:33:50.871968233 +0000 UTC m=+154.285790121"
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.932974 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:50 crc kubenswrapper[5002]: E1203 16:33:50.933283 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.433266265 +0000 UTC m=+154.847088153 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.934114 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:33:50 crc kubenswrapper[5002]: I1203 16:33:50.934166 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.059693 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.060158 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.560141453 +0000 UTC m=+154.973963341 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.173400 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.173579 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.67355425 +0000 UTC m=+155.087376138 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.174061 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.174376 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.674364602 +0000 UTC m=+155.088186490 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.275545 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.275875 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.775860751 +0000 UTC m=+155.189682629 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.378070 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.378412 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.878396897 +0000 UTC m=+155.292218785 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.485698 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.485910 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.985891596 +0000 UTC m=+155.399713484 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.486501 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.486914 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:51.986903423 +0000 UTC m=+155.400725321 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.563392 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 16:33:51 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld
Dec 03 16:33:51 crc kubenswrapper[5002]: [+]process-running ok
Dec 03 16:33:51 crc kubenswrapper[5002]: healthz check failed
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.563644 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.591341 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.591582 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.091560496 +0000 UTC m=+155.505382394 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.692807 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.693179 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.193164168 +0000 UTC m=+155.606986066 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.729891 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" event={"ID":"5cf52b45-df6d-44b7-92bb-703ae07fb259","Type":"ContainerStarted","Data":"f1f799e5ad3412ed3b63394c0793e0ae483b0db0dbcec57307e6f9f916cf9d76"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.737722 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" event={"ID":"b5cbe8f4-807c-412d-91e5-bb1a5b6c677a","Type":"ContainerStarted","Data":"90d540024d4078dcc3e61a07a859e7bb257d494c8a52b4cbaf3b9e43cb79d70b"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.746806 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" event={"ID":"b1e5478b-5439-41eb-b83b-700e37123781","Type":"ContainerStarted","Data":"049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.747219 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.749063 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qqqkv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.749128 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.749432 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-68g57" event={"ID":"17833d15-ffa9-496b-8ee8-6f97fd7f976e","Type":"ContainerStarted","Data":"29ad476bb198f8680b0e9baee1cd49a62403904e51eca1b9d2ccc05d01f20a15"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.756453 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" event={"ID":"43c67705-ae68-4ff3-a7d0-5666619f90bb","Type":"ContainerStarted","Data":"c1f90290aba4699152cf781abb5faa0e79f8d94959051237375d906d22d69938"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.765306 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-d5jnd" podStartSLOduration=129.765289089 podStartE2EDuration="2m9.765289089s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:50.933365388 +0000 UTC m=+154.347187286" watchObservedRunningTime="2025-12-03 16:33:51.765289089 +0000 UTC m=+155.179110977"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.771032 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dvz54" event={"ID":"b5b31661-f06f-4465-a202-4824f903eeaa","Type":"ContainerStarted","Data":"2ef7fc66a417fc62b6d0229d2f4882187450063784744066b9132528431d3252"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.771675 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-dvz54"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.777989 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" event={"ID":"d77e2cb1-507a-44f6-b273-d2140d626699","Type":"ContainerStarted","Data":"c1ed2eec9d674034cdeec0c7b5663cda9ab2d4022b8864d77ff8c2daca0c669a"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.778705 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.801509 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.802625 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.302608558 +0000 UTC m=+155.716430446 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.812758 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vffvr" event={"ID":"89c37c9b-914f-443e-af95-15b1283764ec","Type":"ContainerStarted","Data":"c8047d24892c83cfe277cfbcbc417913433a562e69773a4ee4bed7a22586c554"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.839623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" event={"ID":"c5e48d5b-3b9c-4f4f-a814-60f9c3c25c1d","Type":"ContainerStarted","Data":"fe2915bbbc348a9d74afe7cad7c312bab35c249f5036c80b777fbd3fcbf81cef"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.842394 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2vv8k" podStartSLOduration=129.840740079 podStartE2EDuration="2m9.840740079s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.767394966 +0000 UTC m=+155.181216854" watchObservedRunningTime="2025-12-03 16:33:51.840740079 +0000 UTC m=+155.254561967"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.843038 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" podStartSLOduration=129.843033221 podStartE2EDuration="2m9.843033221s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.834508813 +0000 UTC m=+155.248330691" watchObservedRunningTime="2025-12-03 16:33:51.843033221 +0000 UTC m=+155.256855109"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.845439 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" event={"ID":"0b16e68a-d8fd-46cb-918c-96b83f9df4b2","Type":"ContainerStarted","Data":"88f49f7bc7a8c6c1271a00c05dbe6c06e4848a90d4c846bcda7b4027ab2e4afb"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.873653 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" event={"ID":"fc315c4e-b735-4cd3-92d2-34b505810a5d","Type":"ContainerStarted","Data":"75dbf0a1bcf5bf66d4a6fc32900a344e58b8eee8e3982f2c1169169070b9e5d4"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.892822 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-fm2l2" podStartSLOduration=129.892800763 podStartE2EDuration="2m9.892800763s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.86128848 +0000 UTC m=+155.275110368" watchObservedRunningTime="2025-12-03 16:33:51.892800763 +0000 UTC m=+155.306622671"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.896766 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r2gz4" podStartSLOduration=129.896728269 podStartE2EDuration="2m9.896728269s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.891781906 +0000 UTC m=+155.305603794" watchObservedRunningTime="2025-12-03 16:33:51.896728269 +0000 UTC m=+155.310550157"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.902661 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:51 crc kubenswrapper[5002]: E1203 16:33:51.905020 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.405006841 +0000 UTC m=+155.818828729 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.918134 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778kb" event={"ID":"e72f9b17-a374-4625-8505-dfdeab584c8f","Type":"ContainerStarted","Data":"994266add0971b5ccb4c6ea2d0791d7d268828721d88af7046450de07b9eaf29"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.929357 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7ltpf" podStartSLOduration=129.929330882 podStartE2EDuration="2m9.929330882s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.928740806 +0000 UTC m=+155.342562694" watchObservedRunningTime="2025-12-03 16:33:51.929330882 +0000 UTC m=+155.343152760"
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.960337 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g4prj" event={"ID":"859891e6-8a79-45cc-91af-d40414173836","Type":"ContainerStarted","Data":"13bfa2946c5d545d3ca6202d229276b5151dab25c542456fa6f66762bc0229a4"}
Dec 03 16:33:51 crc kubenswrapper[5002]: I1203 16:33:51.983498 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" podStartSLOduration=130.983475592 podStartE2EDuration="2m10.983475592s" podCreationTimestamp="2025-12-03 16:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.98265164 +0000 UTC m=+155.396473538" watchObservedRunningTime="2025-12-03 16:33:51.983475592 +0000 UTC m=+155.397297490"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.020467 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.021639 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.521620704 +0000 UTC m=+155.935442592 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.028080 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" event={"ID":"392438fa-8a16-4db3-9eb2-a37287f5b558","Type":"ContainerStarted","Data":"c5767f4b25fd2acbee9ec627d0a0915ec14c552ffdbdd317d681d5a6a39782dd"}
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.068985 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-dvz54" podStartSLOduration=11.068962012 podStartE2EDuration="11.068962012s" podCreationTimestamp="2025-12-03 16:33:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:52.042311878 +0000 UTC m=+155.456133766" watchObservedRunningTime="2025-12-03 16:33:52.068962012 +0000 UTC m=+155.482783900"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.077564 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" event={"ID":"306f45fb-8587-468f-8032-ea87c84f9953","Type":"ContainerStarted","Data":"a47125fd8051f41d47c231840f00d83cac14d9cdddb215574cf5b2491a3a583f"}
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.077619 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.080371 5002 patch_prober.go:28] interesting pod/console-operator-58897d9998-sprqt container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.080419 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-sprqt" podUID="62df1cbe-97fd-495e-8249-0a697e526ec9" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.080437 5002 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9td8r container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body=
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.080490 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" podUID="306f45fb-8587-468f-8032-ea87c84f9953" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.080882 5002 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xx7sk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body=
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.081051 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" podUID="c6cd8f6f-3bf5-4725-a936-67049ff23b9b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.116643 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" podStartSLOduration=130.116625108 podStartE2EDuration="2m10.116625108s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:52.114341737 +0000 UTC m=+155.528163625" watchObservedRunningTime="2025-12-03 16:33:52.116625108 +0000 UTC m=+155.530446996"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.117006 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vffvr" podStartSLOduration=11.117001548 podStartE2EDuration="11.117001548s" podCreationTimestamp="2025-12-03 16:33:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:52.068015216 +0000 UTC m=+155.481837104" watchObservedRunningTime="2025-12-03 16:33:52.117001548 +0000 UTC m=+155.530823436"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.158452 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-stdlq"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.159198 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xq8x8" podStartSLOduration=130.159162637 podStartE2EDuration="2m10.159162637s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:52.156719742 +0000 UTC m=+155.570541640" watchObservedRunningTime="2025-12-03 16:33:52.159162637 +0000 UTC m=+155.572984525"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.180026 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.191057 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.691039491 +0000 UTC m=+156.104861379 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.233868 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r" podStartSLOduration=130.233844828 podStartE2EDuration="2m10.233844828s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:52.206994899 +0000 UTC m=+155.620816787" watchObservedRunningTime="2025-12-03 16:33:52.233844828 +0000 UTC m=+155.647666716"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.284051 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.284808 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.784429382 +0000 UTC m=+156.198251270 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.311712 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-j964n" podStartSLOduration=130.311690382 podStartE2EDuration="2m10.311690382s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:52.266909313 +0000 UTC m=+155.680731221" watchObservedRunningTime="2025-12-03 16:33:52.311690382 +0000 UTC m=+155.725512270"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.385927 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.386251 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.886238819 +0000 UTC m=+156.300060707 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.486994 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.487480 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:52.98746282 +0000 UTC m=+156.401284708 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.589942 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 16:33:52 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld
Dec 03 16:33:52 crc kubenswrapper[5002]: [+]process-running ok
Dec 03 16:33:52 crc kubenswrapper[5002]: healthz check failed
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.590006 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.591035 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.591413 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.091394854 +0000 UTC m=+156.505216742 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.691849 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.692025 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.191993978 +0000 UTC m=+156.605815866 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.692502 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.692824 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.1928126 +0000 UTC m=+156.606634488 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.811460 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.811895 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.311876939 +0000 UTC m=+156.725698817 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:52 crc kubenswrapper[5002]: I1203 16:33:52.913019 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:52 crc kubenswrapper[5002]: E1203 16:33:52.913597 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.413570262 +0000 UTC m=+156.827392320 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.014290 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.014772 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.514733791 +0000 UTC m=+156.928555679 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.090612 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.123440 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.123800 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.623787022 +0000 UTC m=+157.037608910 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.137561 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dvz54" event={"ID":"b5b31661-f06f-4465-a202-4824f903eeaa","Type":"ContainerStarted","Data":"4d91cb5c2c8a43759f4be69f7defe00a6d8338f536c6ec6607ac56015b6f285b"}
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.145311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778kb" event={"ID":"e72f9b17-a374-4625-8505-dfdeab584c8f","Type":"ContainerStarted","Data":"6f8a2f92921e01aee7553641b8c3382f1ef2db6d703bf2f377548c78086116e0"}
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.151181 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qqqkv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.151231 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.156565 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9td8r"
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.224081 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.225738 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.725721132 +0000 UTC m=+157.139543020 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.331152 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx"
Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.332147 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.832125762 +0000 UTC m=+157.245947650 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.437402 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.437855 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:53.937839114 +0000 UTC m=+157.351661002 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.541682 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.542031 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.042019804 +0000 UTC m=+157.455841692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.574887 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:53 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:53 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:53 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.574960 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.583177 5002 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.642578 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.642961 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 16:33:54.142913276 +0000 UTC m=+157.556735164 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.643170 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.643519 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.143507152 +0000 UTC m=+157.557329040 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.745337 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.745687 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.245671317 +0000 UTC m=+157.659493205 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.845040 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2rrfp"] Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.846522 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.847530 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.847988 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.347972538 +0000 UTC m=+157.761794426 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.860313 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gtbgz"] Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.861407 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.887331 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.907240 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.914838 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtbgz"] Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.917732 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2rrfp"] Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949192 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949523 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-utilities\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949560 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tn5c\" (UniqueName: \"kubernetes.io/projected/cc2070de-fd9c-401b-9978-70c2fe35c939-kube-api-access-2tn5c\") pod 
\"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949580 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/772ffd36-1d82-4493-96bd-09b67515116f-kube-api-access-hthnm\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949628 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-utilities\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949662 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-catalog-content\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.949682 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-catalog-content\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:53 crc kubenswrapper[5002]: E1203 16:33:53.949810 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.449794405 +0000 UTC m=+157.863616293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.986264 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7b6mw"] Dec 03 16:33:53 crc kubenswrapper[5002]: I1203 16:33:53.987378 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.047830 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7b6mw"] Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.055779 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-catalog-content\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.055857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-catalog-content\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.055902 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-utilities\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.055950 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tn5c\" (UniqueName: \"kubernetes.io/projected/cc2070de-fd9c-401b-9978-70c2fe35c939-kube-api-access-2tn5c\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.055993 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.056021 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/772ffd36-1d82-4493-96bd-09b67515116f-kube-api-access-hthnm\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.056052 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-utilities\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.056077 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-utilities\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: 
I1203 16:33:54.056118 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-catalog-content\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.056141 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqhz5\" (UniqueName: \"kubernetes.io/projected/889a250f-9618-48a9-b381-68056983907b-kube-api-access-fqhz5\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.056654 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-catalog-content\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.057031 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-utilities\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: E1203 16:33:54.057710 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.557693324 +0000 UTC m=+157.971515212 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.058319 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-utilities\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.058527 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-catalog-content\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.104512 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-bpkhn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.104714 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-bpkhn" podUID="5fa63999-6c0a-4b33-8585-ef7c04ceab79" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.105010 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.105062 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.113836 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-bpkhn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.114038 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bpkhn" podUID="5fa63999-6c0a-4b33-8585-ef7c04ceab79" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.115141 5002 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T16:33:53.583207486Z","Handler":null,"Name":""} Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.117823 5002 patch_prober.go:28] interesting pod/console-f9d7485db-68mq2 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: 
connect: connection refused" start-of-body= Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.117882 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-68mq2" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.149989 5002 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xx7sk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.150083 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" podUID="c6cd8f6f-3bf5-4725-a936-67049ff23b9b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.151417 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tn5c\" (UniqueName: \"kubernetes.io/projected/cc2070de-fd9c-401b-9978-70c2fe35c939-kube-api-access-2tn5c\") pod \"certified-operators-2rrfp\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") " pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.158513 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.159048 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.161087 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6jwj6"] Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.202278 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/772ffd36-1d82-4493-96bd-09b67515116f-kube-api-access-hthnm\") pod \"community-operators-gtbgz\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") " pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.213007 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:54 crc kubenswrapper[5002]: E1203 16:33:54.213332 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.713301952 +0000 UTC m=+158.127123840 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.213967 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqhz5\" (UniqueName: \"kubernetes.io/projected/889a250f-9618-48a9-b381-68056983907b-kube-api-access-fqhz5\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.214082 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-catalog-content\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.214236 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.214345 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-utilities\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.217315 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-catalog-content\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.217791 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-utilities\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: E1203 16:33:54.217998 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:33:54.717981718 +0000 UTC m=+158.131803606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6qpdx" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.219053 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2rrfp" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.219917 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.228408 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.269349 5002 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.269661 5002 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.276484 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778kb" event={"ID":"e72f9b17-a374-4625-8505-dfdeab584c8f","Type":"ContainerStarted","Data":"790cc036f23dec038f8fc6edde4ee348b25b4d0c3efbe7c2f099b8471b9772c6"} Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.276529 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778kb" event={"ID":"e72f9b17-a374-4625-8505-dfdeab584c8f","Type":"ContainerStarted","Data":"3f09bd03b1b0f5167607aabc0b42a4f97104457f600b3918e1d02dd3003567b5"} Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.280870 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jwj6"] Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.281695 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.281766 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.326864 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.327443 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.348147 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqhz5\" (UniqueName: 
\"kubernetes.io/projected/889a250f-9618-48a9-b381-68056983907b-kube-api-access-fqhz5\") pod \"certified-operators-7b6mw\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") " pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.415712 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.429995 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-778kb" podStartSLOduration=12.429978325 podStartE2EDuration="12.429978325s" podCreationTimestamp="2025-12-03 16:33:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:54.421418706 +0000 UTC m=+157.835240594" watchObservedRunningTime="2025-12-03 16:33:54.429978325 +0000 UTC m=+157.843800213" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.431720 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmpxg\" (UniqueName: \"kubernetes.io/projected/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-kube-api-access-rmpxg\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.431814 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-utilities\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.431945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.432077 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-catalog-content\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.459883 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.459962 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.525726 5002 patch_prober.go:28] interesting pod/apiserver-76f77b778f-6zz9b container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]log ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]etcd ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/generic-apiserver-start-informers ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/max-in-flight-filter ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 03 16:33:54 crc kubenswrapper[5002]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 03 16:33:54 crc kubenswrapper[5002]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/project.openshift.io-projectcache ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/openshift.io-startinformers ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 03 16:33:54 crc kubenswrapper[5002]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 03 16:33:54 crc kubenswrapper[5002]: livez check failed Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.525840 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" podUID="fc315c4e-b735-4cd3-92d2-34b505810a5d" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.533468 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-catalog-content\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.533554 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmpxg\" (UniqueName: \"kubernetes.io/projected/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-kube-api-access-rmpxg\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.533617 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-utilities\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.534286 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-utilities\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.534712 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-catalog-content\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.567026 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.575652 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:54 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:54 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:54 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.575717 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.580578 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmpxg\" (UniqueName: \"kubernetes.io/projected/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-kube-api-access-rmpxg\") pod \"community-operators-6jwj6\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") " pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.651114 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jwj6" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.651271 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7b6mw" Dec 03 16:33:54 crc kubenswrapper[5002]: I1203 16:33:54.897284 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:54.997962 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qqqkv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:54.998036 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:54.998491 5002 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qqqkv container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:54.998510 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.002073 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-sprqt" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.031551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6qpdx\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.069105 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xx7sk" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.162432 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtbgz"] Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.305084 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.386461 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerStarted","Data":"3bfa92427e500a65aefd2c058bdd0bf1df474cc79cde31478b51d23a66f496ed"} Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.430299 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-72gmf" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.470042 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9wqft"] Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.471171 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.503333 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.505624 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9wqft"] Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.591499 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-utilities\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.591976 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-catalog-content\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.592226 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ppcw\" (UniqueName: \"kubernetes.io/projected/04e2999b-f568-479e-b811-44a0d9082524-kube-api-access-2ppcw\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.660345 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:55 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:55 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:55 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.660424 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.694798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-2ppcw\" (UniqueName: \"kubernetes.io/projected/04e2999b-f568-479e-b811-44a0d9082524-kube-api-access-2ppcw\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.694845 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-utilities\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.694871 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-catalog-content\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.695907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-catalog-content\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.696682 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-utilities\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.747941 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ppcw\" (UniqueName: \"kubernetes.io/projected/04e2999b-f568-479e-b811-44a0d9082524-kube-api-access-2ppcw\") pod \"redhat-marketplace-9wqft\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") " pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.795190 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2rrfp"] Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.863543 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9wqft" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.865465 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-scrwq"] Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.866616 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.926331 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scrwq"] Dec 03 16:33:55 crc kubenswrapper[5002]: I1203 16:33:55.963006 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jwj6"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.014944 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ds59\" (UniqueName: \"kubernetes.io/projected/78b49351-215c-44ad-b9f3-13090f994617-kube-api-access-4ds59\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.015070 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-utilities\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.015135 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-catalog-content\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.116742 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-catalog-content\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.116884 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ds59\" (UniqueName: \"kubernetes.io/projected/78b49351-215c-44ad-b9f3-13090f994617-kube-api-access-4ds59\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.116980 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-utilities\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.117598 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-utilities\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.117689 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-catalog-content\") pod \"redhat-marketplace-scrwq\" (UID: 
\"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.120403 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7b6mw"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.182907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ds59\" (UniqueName: \"kubernetes.io/projected/78b49351-215c-44ad-b9f3-13090f994617-kube-api-access-4ds59\") pod \"redhat-marketplace-scrwq\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") " pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.203231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6qpdx"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.252180 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.398961 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwj6" event={"ID":"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0","Type":"ContainerStarted","Data":"77a2eb025fa6f4df906cd063418f07311b2e27d3eab9ce5d2b8b458ce10fbcec"} Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.399960 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" event={"ID":"56badc5f-4e9d-4129-855f-4c2a54eb63d8","Type":"ContainerStarted","Data":"7d144f941c74168a20d6d792f9ad026a786f7a14e3bdbc5faed1f974b3766022"} Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.400820 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rrfp" event={"ID":"cc2070de-fd9c-401b-9978-70c2fe35c939","Type":"ContainerStarted","Data":"34bb937eb63138831452d2f8273dd5a742962f0e38b19b2c9d1d895a0810727e"} Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.401620 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerStarted","Data":"ed1587b17747e9a5e408ab17289660363a136765ea7bed32ea28839ab38ea08e"} Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.403232 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerStarted","Data":"db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f"} Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.494045 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9wqft"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.567704 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:56 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:56 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:56 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.567780 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" 
podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.644931 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9j4z7"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.646658 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.651218 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.658612 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9j4z7"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.734439 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-catalog-content\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.734533 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq9c9\" (UniqueName: \"kubernetes.io/projected/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-kube-api-access-fq9c9\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.734571 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-utilities\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.768396 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scrwq"] Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.835125 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-utilities\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.835191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-catalog-content\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.835255 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq9c9\" (UniqueName: \"kubernetes.io/projected/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-kube-api-access-fq9c9\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.835925 5002 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-utilities\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.835933 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-catalog-content\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.855078 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq9c9\" (UniqueName: \"kubernetes.io/projected/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-kube-api-access-fq9c9\") pod \"redhat-operators-9j4z7\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") " pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:56 crc kubenswrapper[5002]: I1203 16:33:56.888084 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9j4z7" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.052912 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qkg9k"] Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.061674 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qkg9k"] Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.062190 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.147697 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-catalog-content\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.147799 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-utilities\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.147833 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw6cg\" (UniqueName: \"kubernetes.io/projected/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-kube-api-access-vw6cg\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.249894 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-utilities\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.250277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw6cg\" (UniqueName: 
\"kubernetes.io/projected/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-kube-api-access-vw6cg\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.250351 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-catalog-content\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.250548 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-utilities\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.250943 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-catalog-content\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.276847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw6cg\" (UniqueName: \"kubernetes.io/projected/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-kube-api-access-vw6cg\") pod \"redhat-operators-qkg9k\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") " pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.322965 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9j4z7"] Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.378955 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qkg9k" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.409781 5002 generic.go:334] "Generic (PLEG): container finished" podID="889a250f-9618-48a9-b381-68056983907b" containerID="bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81" exitCode=0 Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.410092 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerDied","Data":"bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.413024 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.415462 5002 generic.go:334] "Generic (PLEG): container finished" podID="772ffd36-1d82-4493-96bd-09b67515116f" containerID="db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f" exitCode=0 Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.415534 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerDied","Data":"db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.436267 5002 generic.go:334] "Generic (PLEG): container finished" podID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerID="198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1" exitCode=0 Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.436350 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwj6" event={"ID":"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0","Type":"ContainerDied","Data":"198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.438711 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" event={"ID":"56badc5f-4e9d-4129-855f-4c2a54eb63d8","Type":"ContainerStarted","Data":"8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.438824 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.447468 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scrwq" event={"ID":"78b49351-215c-44ad-b9f3-13090f994617","Type":"ContainerStarted","Data":"1cbb7906f6cfd0be674180a1f47f30390d77a26a32baacc5ffb2d3178fe38f51"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.466585 5002 generic.go:334] "Generic (PLEG): container finished" podID="04e2999b-f568-479e-b811-44a0d9082524" containerID="836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb" exitCode=0 Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.467147 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9wqft" event={"ID":"04e2999b-f568-479e-b811-44a0d9082524","Type":"ContainerDied","Data":"836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.467236 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-9wqft" event={"ID":"04e2999b-f568-479e-b811-44a0d9082524","Type":"ContainerStarted","Data":"4408883e9aa3eaa8e999476466d114661f14e4b31a633c351edf59ebfeb974ed"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.489445 5002 generic.go:334] "Generic (PLEG): container finished" podID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerID="dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7" exitCode=0 Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.489793 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rrfp" event={"ID":"cc2070de-fd9c-401b-9978-70c2fe35c939","Type":"ContainerDied","Data":"dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.506624 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerStarted","Data":"2c5de96f49b54d7af2b0b59a42d2445e9eeb0294f153e53bcad9effedeed8028"} Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.519223 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" podStartSLOduration=135.519196793 podStartE2EDuration="2m15.519196793s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:57.514838186 +0000 UTC m=+160.928660084" watchObservedRunningTime="2025-12-03 16:33:57.519196793 +0000 UTC m=+160.933018681" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.571374 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:57 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:57 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:57 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.571883 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:57 crc kubenswrapper[5002]: I1203 16:33:57.957229 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qkg9k"] Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.246967 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.248024 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.252019 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.252354 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.262142 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.379726 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.380657 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.384446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5016f9b2-8e54-4fbb-a415-3370652dec37-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.384509 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5016f9b2-8e54-4fbb-a415-3370652dec37-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.385632 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.386371 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.390242 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.486564 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5016f9b2-8e54-4fbb-a415-3370652dec37-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.486687 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1af72c29-21ce-4497-885c-a00ca156b2ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.486788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1af72c29-21ce-4497-885c-a00ca156b2ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.486814 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5016f9b2-8e54-4fbb-a415-3370652dec37-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.487402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5016f9b2-8e54-4fbb-a415-3370652dec37-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.506429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5016f9b2-8e54-4fbb-a415-3370652dec37-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.515852 5002 generic.go:334] "Generic (PLEG): container finished" podID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerID="1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0" exitCode=0 Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.516109 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerDied","Data":"1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0"} Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.523793 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4a92b26-42c8-4f22-bfba-6c63140c6501" containerID="8a381b9d3db8b02b2ce9ab7c166d60f557510bb96325d5894c8a5ede1ec5ae4c" exitCode=0 Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.523873 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" event={"ID":"f4a92b26-42c8-4f22-bfba-6c63140c6501","Type":"ContainerDied","Data":"8a381b9d3db8b02b2ce9ab7c166d60f557510bb96325d5894c8a5ede1ec5ae4c"} Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.539822 5002 generic.go:334] "Generic (PLEG): container finished" podID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerID="2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9" exitCode=0 Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.539908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerDied","Data":"2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9"} Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.539944 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerStarted","Data":"26e15d370a82cbf9d3837aa3999e10d886771e7a4c3f4501876da4c270814024"} Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.553352 5002 generic.go:334] "Generic (PLEG): container finished" podID="78b49351-215c-44ad-b9f3-13090f994617" containerID="b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a" exitCode=0 
Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.553412 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scrwq" event={"ID":"78b49351-215c-44ad-b9f3-13090f994617","Type":"ContainerDied","Data":"b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a"} Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.570581 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:58 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:58 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:58 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.570651 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.587828 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1af72c29-21ce-4497-885c-a00ca156b2ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.587944 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1af72c29-21ce-4497-885c-a00ca156b2ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.588104 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1af72c29-21ce-4497-885c-a00ca156b2ab-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.618719 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1af72c29-21ce-4497-885c-a00ca156b2ab-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.636600 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:33:58 crc kubenswrapper[5002]: I1203 16:33:58.764386 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.083346 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 16:33:59 crc kubenswrapper[5002]: W1203 16:33:59.137193 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod5016f9b2_8e54_4fbb_a415_3370652dec37.slice/crio-2c6e49e43098ad4aaff41b3c7ca7fa2624f95afcf9d19b01f78768dcee4f49c4 WatchSource:0}: Error finding container 2c6e49e43098ad4aaff41b3c7ca7fa2624f95afcf9d19b01f78768dcee4f49c4: Status 404 returned error can't find the container with id 2c6e49e43098ad4aaff41b3c7ca7fa2624f95afcf9d19b01f78768dcee4f49c4 Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.167730 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.176478 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.203724 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-6zz9b" Dec 03 16:33:59 crc kubenswrapper[5002]: W1203 16:33:59.221572 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1af72c29_21ce_4497_885c_a00ca156b2ab.slice/crio-458125cfa3f9715d50ddbcf76b387c52426b814255a5427ca22340fb92d65e01 WatchSource:0}: Error finding container 458125cfa3f9715d50ddbcf76b387c52426b814255a5427ca22340fb92d65e01: Status 404 returned error can't find the container with id 458125cfa3f9715d50ddbcf76b387c52426b814255a5427ca22340fb92d65e01 Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.574063 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:33:59 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:33:59 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:33:59 crc kubenswrapper[5002]: healthz check failed Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.574127 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.677962 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1af72c29-21ce-4497-885c-a00ca156b2ab","Type":"ContainerStarted","Data":"458125cfa3f9715d50ddbcf76b387c52426b814255a5427ca22340fb92d65e01"} Dec 03 16:33:59 crc kubenswrapper[5002]: I1203 16:33:59.701525 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5016f9b2-8e54-4fbb-a415-3370652dec37","Type":"ContainerStarted","Data":"2c6e49e43098ad4aaff41b3c7ca7fa2624f95afcf9d19b01f78768dcee4f49c4"} Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.069907 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-dvz54" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.334090 5002 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.421396 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlbjp\" (UniqueName: \"kubernetes.io/projected/f4a92b26-42c8-4f22-bfba-6c63140c6501-kube-api-access-mlbjp\") pod \"f4a92b26-42c8-4f22-bfba-6c63140c6501\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.421493 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a92b26-42c8-4f22-bfba-6c63140c6501-config-volume\") pod \"f4a92b26-42c8-4f22-bfba-6c63140c6501\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.421561 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4a92b26-42c8-4f22-bfba-6c63140c6501-secret-volume\") pod \"f4a92b26-42c8-4f22-bfba-6c63140c6501\" (UID: \"f4a92b26-42c8-4f22-bfba-6c63140c6501\") " Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.425213 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4a92b26-42c8-4f22-bfba-6c63140c6501-config-volume" (OuterVolumeSpecName: "config-volume") pod "f4a92b26-42c8-4f22-bfba-6c63140c6501" (UID: "f4a92b26-42c8-4f22-bfba-6c63140c6501"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.456131 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4a92b26-42c8-4f22-bfba-6c63140c6501-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f4a92b26-42c8-4f22-bfba-6c63140c6501" (UID: "f4a92b26-42c8-4f22-bfba-6c63140c6501"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.486206 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4a92b26-42c8-4f22-bfba-6c63140c6501-kube-api-access-mlbjp" (OuterVolumeSpecName: "kube-api-access-mlbjp") pod "f4a92b26-42c8-4f22-bfba-6c63140c6501" (UID: "f4a92b26-42c8-4f22-bfba-6c63140c6501"). InnerVolumeSpecName "kube-api-access-mlbjp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.523740 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlbjp\" (UniqueName: \"kubernetes.io/projected/f4a92b26-42c8-4f22-bfba-6c63140c6501-kube-api-access-mlbjp\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.523789 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4a92b26-42c8-4f22-bfba-6c63140c6501-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.523799 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4a92b26-42c8-4f22-bfba-6c63140c6501-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.569759 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:34:00 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:34:00 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:34:00 crc kubenswrapper[5002]: healthz check failed Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.569880 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.720022 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" event={"ID":"f4a92b26-42c8-4f22-bfba-6c63140c6501","Type":"ContainerDied","Data":"7276fc0d926873a168dd78eb2e24cda4eea565781e6725ba384a2fc6b4f6389f"} Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.720067 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7276fc0d926873a168dd78eb2e24cda4eea565781e6725ba384a2fc6b4f6389f" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.720133 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.760458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1af72c29-21ce-4497-885c-a00ca156b2ab","Type":"ContainerStarted","Data":"b0e3afc6ecfb154d1c844d2c26f2f4ad3e39789c55a10ebc0505fc19ead37b5d"} Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.766135 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5016f9b2-8e54-4fbb-a415-3370652dec37","Type":"ContainerStarted","Data":"a2b338f723d0047249d1bb406134c95c6bf2f893795961a87da8afb9755d24da"} Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.800472 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.800452494 podStartE2EDuration="2.800452494s" podCreationTimestamp="2025-12-03 16:33:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:34:00.788304818 +0000 UTC m=+164.202126726" watchObservedRunningTime="2025-12-03 16:34:00.800452494 +0000 UTC m=+164.214274382" Dec 03 16:34:00 crc kubenswrapper[5002]: I1203 16:34:00.822611 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.822591827 podStartE2EDuration="2.822591827s" podCreationTimestamp="2025-12-03 16:33:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:34:00.822493244 +0000 UTC m=+164.236315142" watchObservedRunningTime="2025-12-03 16:34:00.822591827 +0000 UTC m=+164.236413715" Dec 03 16:34:01 crc kubenswrapper[5002]: I1203 16:34:01.565845 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:34:01 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:34:01 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:34:01 crc kubenswrapper[5002]: healthz check failed Dec 03 16:34:01 crc kubenswrapper[5002]: I1203 16:34:01.565917 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:34:01 crc kubenswrapper[5002]: I1203 16:34:01.784099 5002 generic.go:334] "Generic (PLEG): container finished" podID="5016f9b2-8e54-4fbb-a415-3370652dec37" containerID="a2b338f723d0047249d1bb406134c95c6bf2f893795961a87da8afb9755d24da" exitCode=0 Dec 03 16:34:01 crc kubenswrapper[5002]: I1203 16:34:01.784899 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5016f9b2-8e54-4fbb-a415-3370652dec37","Type":"ContainerDied","Data":"a2b338f723d0047249d1bb406134c95c6bf2f893795961a87da8afb9755d24da"} Dec 03 16:34:02 crc kubenswrapper[5002]: I1203 16:34:02.565587 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP 
probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:34:02 crc kubenswrapper[5002]: [-]has-synced failed: reason withheld Dec 03 16:34:02 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:34:02 crc kubenswrapper[5002]: healthz check failed Dec 03 16:34:02 crc kubenswrapper[5002]: I1203 16:34:02.565662 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:34:02 crc kubenswrapper[5002]: I1203 16:34:02.814354 5002 generic.go:334] "Generic (PLEG): container finished" podID="1af72c29-21ce-4497-885c-a00ca156b2ab" containerID="b0e3afc6ecfb154d1c844d2c26f2f4ad3e39789c55a10ebc0505fc19ead37b5d" exitCode=0 Dec 03 16:34:02 crc kubenswrapper[5002]: I1203 16:34:02.814669 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1af72c29-21ce-4497-885c-a00ca156b2ab","Type":"ContainerDied","Data":"b0e3afc6ecfb154d1c844d2c26f2f4ad3e39789c55a10ebc0505fc19ead37b5d"} Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.303046 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.419085 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5016f9b2-8e54-4fbb-a415-3370652dec37-kubelet-dir\") pod \"5016f9b2-8e54-4fbb-a415-3370652dec37\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.419207 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5016f9b2-8e54-4fbb-a415-3370652dec37-kube-api-access\") pod \"5016f9b2-8e54-4fbb-a415-3370652dec37\" (UID: \"5016f9b2-8e54-4fbb-a415-3370652dec37\") " Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.419389 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5016f9b2-8e54-4fbb-a415-3370652dec37-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5016f9b2-8e54-4fbb-a415-3370652dec37" (UID: "5016f9b2-8e54-4fbb-a415-3370652dec37"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.419779 5002 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5016f9b2-8e54-4fbb-a415-3370652dec37-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.431036 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5016f9b2-8e54-4fbb-a415-3370652dec37-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5016f9b2-8e54-4fbb-a415-3370652dec37" (UID: "5016f9b2-8e54-4fbb-a415-3370652dec37"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.521337 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5016f9b2-8e54-4fbb-a415-3370652dec37-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.564201 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:34:03 crc kubenswrapper[5002]: [+]has-synced ok Dec 03 16:34:03 crc kubenswrapper[5002]: [+]process-running ok Dec 03 16:34:03 crc kubenswrapper[5002]: healthz check failed Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.564270 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.846980 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5016f9b2-8e54-4fbb-a415-3370652dec37","Type":"ContainerDied","Data":"2c6e49e43098ad4aaff41b3c7ca7fa2624f95afcf9d19b01f78768dcee4f49c4"} Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.847030 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:34:03 crc kubenswrapper[5002]: I1203 16:34:03.847039 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c6e49e43098ad4aaff41b3c7ca7fa2624f95afcf9d19b01f78768dcee4f49c4" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.101639 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-bpkhn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.101694 5002 patch_prober.go:28] interesting pod/downloads-7954f5f757-bpkhn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.101707 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bpkhn" podUID="5fa63999-6c0a-4b33-8585-ef7c04ceab79" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.101736 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-bpkhn" podUID="5fa63999-6c0a-4b33-8585-ef7c04ceab79" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.101780 5002 patch_prober.go:28] interesting pod/console-f9d7485db-68mq2 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: 
connect: connection refused" start-of-body= Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.101888 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-68mq2" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.454010 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.547955 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1af72c29-21ce-4497-885c-a00ca156b2ab-kubelet-dir\") pod \"1af72c29-21ce-4497-885c-a00ca156b2ab\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.548029 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1af72c29-21ce-4497-885c-a00ca156b2ab-kube-api-access\") pod \"1af72c29-21ce-4497-885c-a00ca156b2ab\" (UID: \"1af72c29-21ce-4497-885c-a00ca156b2ab\") " Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.548123 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1af72c29-21ce-4497-885c-a00ca156b2ab-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1af72c29-21ce-4497-885c-a00ca156b2ab" (UID: "1af72c29-21ce-4497-885c-a00ca156b2ab"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.548340 5002 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1af72c29-21ce-4497-885c-a00ca156b2ab-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.553476 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1af72c29-21ce-4497-885c-a00ca156b2ab-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1af72c29-21ce-4497-885c-a00ca156b2ab" (UID: "1af72c29-21ce-4497-885c-a00ca156b2ab"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.566194 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.569353 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-wtrm8" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.649475 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1af72c29-21ce-4497-885c-a00ca156b2ab-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.909279 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.909462 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1af72c29-21ce-4497-885c-a00ca156b2ab","Type":"ContainerDied","Data":"458125cfa3f9715d50ddbcf76b387c52426b814255a5427ca22340fb92d65e01"} Dec 03 16:34:04 crc kubenswrapper[5002]: I1203 16:34:04.909523 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="458125cfa3f9715d50ddbcf76b387c52426b814255a5427ca22340fb92d65e01" Dec 03 16:34:05 crc kubenswrapper[5002]: I1203 16:34:05.023551 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:34:05 crc kubenswrapper[5002]: I1203 16:34:05.360875 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:34:05 crc kubenswrapper[5002]: I1203 16:34:05.371441 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/24141739-e7a8-40cf-ab9e-267ee876230b-metrics-certs\") pod \"network-metrics-daemon-c7qvw\" (UID: \"24141739-e7a8-40cf-ab9e-267ee876230b\") " pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:34:05 crc kubenswrapper[5002]: I1203 16:34:05.564430 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c7qvw" Dec 03 16:34:06 crc kubenswrapper[5002]: I1203 16:34:06.205142 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-c7qvw"] Dec 03 16:34:14 crc kubenswrapper[5002]: I1203 16:34:14.118304 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-bpkhn" Dec 03 16:34:14 crc kubenswrapper[5002]: I1203 16:34:14.126094 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:34:14 crc kubenswrapper[5002]: I1203 16:34:14.129237 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:34:15 crc kubenswrapper[5002]: I1203 16:34:15.313612 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:34:15 crc kubenswrapper[5002]: W1203 16:34:15.845733 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24141739_e7a8_40cf_ab9e_267ee876230b.slice/crio-15d18f593d7ee8579d4eea2b2687b2f3b43daf356ae292cce1c5182e8cbcdf08 WatchSource:0}: Error finding container 15d18f593d7ee8579d4eea2b2687b2f3b43daf356ae292cce1c5182e8cbcdf08: Status 404 returned error can't find the container with id 15d18f593d7ee8579d4eea2b2687b2f3b43daf356ae292cce1c5182e8cbcdf08 Dec 03 16:34:16 crc kubenswrapper[5002]: I1203 16:34:16.035888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" 
event={"ID":"24141739-e7a8-40cf-ab9e-267ee876230b","Type":"ContainerStarted","Data":"15d18f593d7ee8579d4eea2b2687b2f3b43daf356ae292cce1c5182e8cbcdf08"} Dec 03 16:34:20 crc kubenswrapper[5002]: I1203 16:34:20.916802 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:34:20 crc kubenswrapper[5002]: I1203 16:34:20.917170 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:34:24 crc kubenswrapper[5002]: I1203 16:34:24.540885 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:34:24 crc kubenswrapper[5002]: I1203 16:34:24.702392 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dqgqr" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.196164 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 16:34:36 crc kubenswrapper[5002]: E1203 16:34:36.197420 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4a92b26-42c8-4f22-bfba-6c63140c6501" containerName="collect-profiles" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.197436 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4a92b26-42c8-4f22-bfba-6c63140c6501" containerName="collect-profiles" Dec 03 16:34:36 crc kubenswrapper[5002]: E1203 16:34:36.197446 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5016f9b2-8e54-4fbb-a415-3370652dec37" containerName="pruner" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.197451 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5016f9b2-8e54-4fbb-a415-3370652dec37" containerName="pruner" Dec 03 16:34:36 crc kubenswrapper[5002]: E1203 16:34:36.197471 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1af72c29-21ce-4497-885c-a00ca156b2ab" containerName="pruner" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.197478 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1af72c29-21ce-4497-885c-a00ca156b2ab" containerName="pruner" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.197582 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4a92b26-42c8-4f22-bfba-6c63140c6501" containerName="collect-profiles" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.197591 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5016f9b2-8e54-4fbb-a415-3370652dec37" containerName="pruner" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.197599 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1af72c29-21ce-4497-885c-a00ca156b2ab" containerName="pruner" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.198048 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.205125 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.206478 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.206725 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.291981 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.292110 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.393877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.394001 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.394100 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.417332 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: I1203 16:34:36.529934 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:36 crc kubenswrapper[5002]: E1203 16:34:36.733268 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 03 16:34:36 crc kubenswrapper[5002]: E1203 16:34:36.733483 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rmpxg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6jwj6_openshift-marketplace(4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:36 crc kubenswrapper[5002]: E1203 16:34:36.734672 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6jwj6" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" Dec 03 16:34:38 crc kubenswrapper[5002]: E1203 16:34:38.539501 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 16:34:38 crc kubenswrapper[5002]: E1203 16:34:38.540280 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fqhz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7b6mw_openshift-marketplace(889a250f-9618-48a9-b381-68056983907b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:38 crc kubenswrapper[5002]: E1203 16:34:38.541503 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7b6mw" podUID="889a250f-9618-48a9-b381-68056983907b" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.739335 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6jwj6" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.739594 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7b6mw" podUID="889a250f-9618-48a9-b381-68056983907b" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.836586 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.836839 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vw6cg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qkg9k_openshift-marketplace(b830fe22-e3e0-4de1-8aa4-8e16a3e9594c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.838051 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qkg9k" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.849605 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.849856 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hthnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gtbgz_openshift-marketplace(772ffd36-1d82-4493-96bd-09b67515116f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.852142 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gtbgz" podUID="772ffd36-1d82-4493-96bd-09b67515116f" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.858114 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.858386 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fq9c9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9j4z7_openshift-marketplace(6b80fc07-401e-4ddd-8a97-c66cc66d68b5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:41 crc kubenswrapper[5002]: E1203 16:34:41.859629 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-9j4z7" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" Dec 03 16:34:41 crc kubenswrapper[5002]: I1203 16:34:41.996462 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 16:34:41 crc kubenswrapper[5002]: I1203 16:34:41.997409 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.008976 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.079520 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kube-api-access\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.080259 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.080290 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-var-lock\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.181840 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.181903 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-var-lock\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.181930 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kube-api-access\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.182392 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.182443 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-var-lock\") pod \"installer-9-crc\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.202567 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: I1203 16:34:42.325319 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:34:42 crc kubenswrapper[5002]: E1203 16:34:42.993181 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9j4z7" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" Dec 03 16:34:42 crc kubenswrapper[5002]: E1203 16:34:42.993814 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gtbgz" podUID="772ffd36-1d82-4493-96bd-09b67515116f" Dec 03 16:34:42 crc kubenswrapper[5002]: E1203 16:34:42.993877 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-qkg9k" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.084164 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.084554 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2ppcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-9wqft_openshift-marketplace(04e2999b-f568-479e-b811-44a0d9082524): ErrImagePull: rpc error: code = Canceled 
desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.085900 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-9wqft" podUID="04e2999b-f568-479e-b811-44a0d9082524" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.122900 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.123121 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2tn5c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2rrfp_openshift-marketplace(cc2070de-fd9c-401b-9978-70c2fe35c939): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.124586 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2rrfp" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.151910 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.152219 5002 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4ds59,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-scrwq_openshift-marketplace(78b49351-215c-44ad-b9f3-13090f994617): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.153451 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-scrwq" podUID="78b49351-215c-44ad-b9f3-13090f994617" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.222742 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-scrwq" podUID="78b49351-215c-44ad-b9f3-13090f994617" Dec 03 16:34:43 crc kubenswrapper[5002]: E1203 16:34:43.223376 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2rrfp" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" Dec 03 16:34:43 crc kubenswrapper[5002]: I1203 16:34:43.469528 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 16:34:43 crc kubenswrapper[5002]: I1203 16:34:43.543849 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.228597 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83","Type":"ContainerStarted","Data":"03d0f90923e5c657194e2f3303a1916f6402e1e84030fda54bfd6579cc89f3c2"} Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.229190 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83","Type":"ContainerStarted","Data":"a8d86e715eecf249f0550b8f30aaf29bc1f435d09670448c48a3ad9403646adc"} Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.231706 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" event={"ID":"24141739-e7a8-40cf-ab9e-267ee876230b","Type":"ContainerStarted","Data":"bc8033e5b806bc2c34d20d3498fb545c7f2c538b18908090cf7de897d4dc14cf"} Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.231871 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c7qvw" event={"ID":"24141739-e7a8-40cf-ab9e-267ee876230b","Type":"ContainerStarted","Data":"fa1fe5901face94b439870d3ae1db7b7f16d1f00915adeba0dbc9d0d783b8a6d"} Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.233795 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c","Type":"ContainerStarted","Data":"cbed3b7849d9231e9e074c6cf4b0b9cc74e7d566b12e5559dd4f6998dc60b127"} Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.233855 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c","Type":"ContainerStarted","Data":"a9a7aad6902507879ff015e7a77be3c58b160fb59bbe6feb8c19f6277852fb61"} Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.252641 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=8.252611629 podStartE2EDuration="8.252611629s" podCreationTimestamp="2025-12-03 16:34:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:34:44.245811742 +0000 UTC m=+207.659633670" watchObservedRunningTime="2025-12-03 16:34:44.252611629 +0000 UTC m=+207.666433517" Dec 03 16:34:44 crc kubenswrapper[5002]: I1203 16:34:44.272776 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.272733272 podStartE2EDuration="3.272733272s" podCreationTimestamp="2025-12-03 16:34:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:34:44.268261579 +0000 UTC m=+207.682083477" watchObservedRunningTime="2025-12-03 16:34:44.272733272 +0000 UTC m=+207.686555160" Dec 03 16:34:45 crc kubenswrapper[5002]: I1203 16:34:45.244049 5002 generic.go:334] "Generic (PLEG): container finished" podID="70e4fadf-2e10-4d33-81c0-b9bdeda0bf83" containerID="03d0f90923e5c657194e2f3303a1916f6402e1e84030fda54bfd6579cc89f3c2" exitCode=0 Dec 03 16:34:45 crc kubenswrapper[5002]: I1203 16:34:45.244174 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83","Type":"ContainerDied","Data":"03d0f90923e5c657194e2f3303a1916f6402e1e84030fda54bfd6579cc89f3c2"} Dec 03 16:34:45 crc kubenswrapper[5002]: I1203 16:34:45.274947 5002 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-c7qvw" podStartSLOduration=183.274918276 podStartE2EDuration="3m3.274918276s" podCreationTimestamp="2025-12-03 16:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:34:44.291322843 +0000 UTC m=+207.705144751" watchObservedRunningTime="2025-12-03 16:34:45.274918276 +0000 UTC m=+208.688740174" Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.518340 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.550713 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kubelet-dir\") pod \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.550938 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kube-api-access\") pod \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\" (UID: \"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83\") " Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.558825 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "70e4fadf-2e10-4d33-81c0-b9bdeda0bf83" (UID: "70e4fadf-2e10-4d33-81c0-b9bdeda0bf83"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.559258 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "70e4fadf-2e10-4d33-81c0-b9bdeda0bf83" (UID: "70e4fadf-2e10-4d33-81c0-b9bdeda0bf83"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.652783 5002 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:46 crc kubenswrapper[5002]: I1203 16:34:46.652832 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70e4fadf-2e10-4d33-81c0-b9bdeda0bf83-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:47 crc kubenswrapper[5002]: I1203 16:34:47.270445 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"70e4fadf-2e10-4d33-81c0-b9bdeda0bf83","Type":"ContainerDied","Data":"a8d86e715eecf249f0550b8f30aaf29bc1f435d09670448c48a3ad9403646adc"} Dec 03 16:34:47 crc kubenswrapper[5002]: I1203 16:34:47.270502 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:34:47 crc kubenswrapper[5002]: I1203 16:34:47.270527 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8d86e715eecf249f0550b8f30aaf29bc1f435d09670448c48a3ad9403646adc" Dec 03 16:34:50 crc kubenswrapper[5002]: I1203 16:34:50.916964 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:34:50 crc kubenswrapper[5002]: I1203 16:34:50.917561 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:34:50 crc kubenswrapper[5002]: I1203 16:34:50.918580 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:34:50 crc kubenswrapper[5002]: I1203 16:34:50.919246 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:34:50 crc kubenswrapper[5002]: I1203 16:34:50.919345 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5" gracePeriod=600 Dec 03 16:34:51 crc kubenswrapper[5002]: I1203 16:34:51.300117 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5" exitCode=0 Dec 03 16:34:51 crc kubenswrapper[5002]: I1203 16:34:51.300211 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5"} Dec 03 16:34:51 crc kubenswrapper[5002]: I1203 16:34:51.301077 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"1bbdabe3a3b817c3eac3b26805d605bfaebd6c3b6c1598575ca596c5e99f6ed6"} Dec 03 16:34:55 crc kubenswrapper[5002]: I1203 16:34:55.327569 5002 generic.go:334] "Generic (PLEG): container finished" podID="78b49351-215c-44ad-b9f3-13090f994617" containerID="cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5" exitCode=0 Dec 03 16:34:55 crc kubenswrapper[5002]: I1203 16:34:55.327661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scrwq" 
event={"ID":"78b49351-215c-44ad-b9f3-13090f994617","Type":"ContainerDied","Data":"cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5"} Dec 03 16:34:56 crc kubenswrapper[5002]: I1203 16:34:56.341319 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scrwq" event={"ID":"78b49351-215c-44ad-b9f3-13090f994617","Type":"ContainerStarted","Data":"8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca"} Dec 03 16:34:56 crc kubenswrapper[5002]: I1203 16:34:56.363311 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-scrwq" podStartSLOduration=4.189660594 podStartE2EDuration="1m1.363283548s" podCreationTimestamp="2025-12-03 16:33:55 +0000 UTC" firstStartedPulling="2025-12-03 16:33:58.554910761 +0000 UTC m=+161.968732649" lastFinishedPulling="2025-12-03 16:34:55.728533715 +0000 UTC m=+219.142355603" observedRunningTime="2025-12-03 16:34:56.359395071 +0000 UTC m=+219.773216969" watchObservedRunningTime="2025-12-03 16:34:56.363283548 +0000 UTC m=+219.777105436" Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.349883 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerStarted","Data":"36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56"} Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.352109 5002 generic.go:334] "Generic (PLEG): container finished" podID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerID="8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2" exitCode=0 Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.352194 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwj6" event={"ID":"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0","Type":"ContainerDied","Data":"8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2"} Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.355065 5002 generic.go:334] "Generic (PLEG): container finished" podID="772ffd36-1d82-4493-96bd-09b67515116f" containerID="f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3" exitCode=0 Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.355140 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerDied","Data":"f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3"} Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.359954 5002 generic.go:334] "Generic (PLEG): container finished" podID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerID="744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820" exitCode=0 Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.360030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rrfp" event={"ID":"cc2070de-fd9c-401b-9978-70c2fe35c939","Type":"ContainerDied","Data":"744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820"} Dec 03 16:34:57 crc kubenswrapper[5002]: I1203 16:34:57.363541 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerStarted","Data":"0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3"} Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.370373 5002 
generic.go:334] "Generic (PLEG): container finished" podID="04e2999b-f568-479e-b811-44a0d9082524" containerID="7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2" exitCode=0 Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.370456 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9wqft" event={"ID":"04e2999b-f568-479e-b811-44a0d9082524","Type":"ContainerDied","Data":"7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2"} Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.372673 5002 generic.go:334] "Generic (PLEG): container finished" podID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerID="0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3" exitCode=0 Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.372727 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerDied","Data":"0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3"} Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.377452 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerStarted","Data":"1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b"} Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.380112 5002 generic.go:334] "Generic (PLEG): container finished" podID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerID="36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56" exitCode=0 Dec 03 16:34:58 crc kubenswrapper[5002]: I1203 16:34:58.380147 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerDied","Data":"36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56"} Dec 03 16:34:59 crc kubenswrapper[5002]: I1203 16:34:59.388137 5002 generic.go:334] "Generic (PLEG): container finished" podID="889a250f-9618-48a9-b381-68056983907b" containerID="1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b" exitCode=0 Dec 03 16:34:59 crc kubenswrapper[5002]: I1203 16:34:59.388212 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerDied","Data":"1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b"} Dec 03 16:35:03 crc kubenswrapper[5002]: I1203 16:35:03.414207 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerStarted","Data":"8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130"} Dec 03 16:35:03 crc kubenswrapper[5002]: I1203 16:35:03.438094 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gtbgz" podStartSLOduration=5.362324197 podStartE2EDuration="1m10.438074071s" podCreationTimestamp="2025-12-03 16:33:53 +0000 UTC" firstStartedPulling="2025-12-03 16:33:57.41755035 +0000 UTC m=+160.831372238" lastFinishedPulling="2025-12-03 16:35:02.493300224 +0000 UTC m=+225.907122112" observedRunningTime="2025-12-03 16:35:03.436506729 +0000 UTC m=+226.850328617" watchObservedRunningTime="2025-12-03 16:35:03.438074071 +0000 UTC m=+226.851895979" Dec 03 16:35:03 crc kubenswrapper[5002]: 
I1203 16:35:03.826324 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cfssq"] Dec 03 16:35:04 crc kubenswrapper[5002]: I1203 16:35:04.220828 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:35:04 crc kubenswrapper[5002]: I1203 16:35:04.220904 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gtbgz" Dec 03 16:35:05 crc kubenswrapper[5002]: I1203 16:35:05.475863 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-gtbgz" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="registry-server" probeResult="failure" output=< Dec 03 16:35:05 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Dec 03 16:35:05 crc kubenswrapper[5002]: > Dec 03 16:35:06 crc kubenswrapper[5002]: I1203 16:35:06.254170 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:35:06 crc kubenswrapper[5002]: I1203 16:35:06.254260 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:35:06 crc kubenswrapper[5002]: I1203 16:35:06.389280 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:35:06 crc kubenswrapper[5002]: I1203 16:35:06.466567 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-scrwq" Dec 03 16:35:06 crc kubenswrapper[5002]: I1203 16:35:06.617469 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scrwq"] Dec 03 16:35:08 crc kubenswrapper[5002]: I1203 16:35:08.442432 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-scrwq" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="registry-server" containerID="cri-o://8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca" gracePeriod=2 Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.133905 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scrwq"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.199459 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ds59\" (UniqueName: \"kubernetes.io/projected/78b49351-215c-44ad-b9f3-13090f994617-kube-api-access-4ds59\") pod \"78b49351-215c-44ad-b9f3-13090f994617\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") "
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.199913 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-catalog-content\") pod \"78b49351-215c-44ad-b9f3-13090f994617\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") "
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.199943 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-utilities\") pod \"78b49351-215c-44ad-b9f3-13090f994617\" (UID: \"78b49351-215c-44ad-b9f3-13090f994617\") "
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.201136 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-utilities" (OuterVolumeSpecName: "utilities") pod "78b49351-215c-44ad-b9f3-13090f994617" (UID: "78b49351-215c-44ad-b9f3-13090f994617"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.208363 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78b49351-215c-44ad-b9f3-13090f994617-kube-api-access-4ds59" (OuterVolumeSpecName: "kube-api-access-4ds59") pod "78b49351-215c-44ad-b9f3-13090f994617" (UID: "78b49351-215c-44ad-b9f3-13090f994617"). InnerVolumeSpecName "kube-api-access-4ds59". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.223147 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78b49351-215c-44ad-b9f3-13090f994617" (UID: "78b49351-215c-44ad-b9f3-13090f994617"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.300999 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.301033 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78b49351-215c-44ad-b9f3-13090f994617-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.301043 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ds59\" (UniqueName: \"kubernetes.io/projected/78b49351-215c-44ad-b9f3-13090f994617-kube-api-access-4ds59\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.461340 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9wqft" event={"ID":"04e2999b-f568-479e-b811-44a0d9082524","Type":"ContainerStarted","Data":"d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.469716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rrfp" event={"ID":"cc2070de-fd9c-401b-9978-70c2fe35c939","Type":"ContainerStarted","Data":"2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.472510 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerStarted","Data":"abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.474908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerStarted","Data":"e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.477066 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerStarted","Data":"36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.479385 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwj6" event={"ID":"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0","Type":"ContainerStarted","Data":"df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.483962 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9wqft" podStartSLOduration=4.078951044 podStartE2EDuration="1m14.483942946s" podCreationTimestamp="2025-12-03 16:33:55 +0000 UTC" firstStartedPulling="2025-12-03 16:33:58.555617371 +0000 UTC m=+161.969439259" lastFinishedPulling="2025-12-03 16:35:08.960609243 +0000 UTC m=+232.374431161" observedRunningTime="2025-12-03 16:35:09.483342489 +0000 UTC m=+232.897164377" watchObservedRunningTime="2025-12-03 16:35:09.483942946 +0000 UTC m=+232.897764844"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.492131 5002 generic.go:334] "Generic (PLEG): container finished" podID="78b49351-215c-44ad-b9f3-13090f994617" containerID="8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca" exitCode=0
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.492195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scrwq" event={"ID":"78b49351-215c-44ad-b9f3-13090f994617","Type":"ContainerDied","Data":"8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.492224 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scrwq" event={"ID":"78b49351-215c-44ad-b9f3-13090f994617","Type":"ContainerDied","Data":"1cbb7906f6cfd0be674180a1f47f30390d77a26a32baacc5ffb2d3178fe38f51"}
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.492246 5002 scope.go:117] "RemoveContainer" containerID="8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.492249 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scrwq"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.504523 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2rrfp" podStartSLOduration=5.082957038 podStartE2EDuration="1m16.50450331s" podCreationTimestamp="2025-12-03 16:33:53 +0000 UTC" firstStartedPulling="2025-12-03 16:33:57.502568098 +0000 UTC m=+160.916389986" lastFinishedPulling="2025-12-03 16:35:08.92411437 +0000 UTC m=+232.337936258" observedRunningTime="2025-12-03 16:35:09.50303232 +0000 UTC m=+232.916854218" watchObservedRunningTime="2025-12-03 16:35:09.50450331 +0000 UTC m=+232.918325198"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.509552 5002 scope.go:117] "RemoveContainer" containerID="cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.525200 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qkg9k" podStartSLOduration=2.010852292 podStartE2EDuration="1m12.525180818s" podCreationTimestamp="2025-12-03 16:33:57 +0000 UTC" firstStartedPulling="2025-12-03 16:33:58.542601072 +0000 UTC m=+161.956422960" lastFinishedPulling="2025-12-03 16:35:09.056929598 +0000 UTC m=+232.470751486" observedRunningTime="2025-12-03 16:35:09.522173146 +0000 UTC m=+232.935995034" watchObservedRunningTime="2025-12-03 16:35:09.525180818 +0000 UTC m=+232.939002706"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.541186 5002 scope.go:117] "RemoveContainer" containerID="b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.564924 5002 scope.go:117] "RemoveContainer" containerID="8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca"
Dec 03 16:35:09 crc kubenswrapper[5002]: E1203 16:35:09.565604 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca\": container with ID starting with 8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca not found: ID does not exist" containerID="8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.565665 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca"} err="failed to get container status \"8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca\": rpc error: code = NotFound desc = could not find container \"8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca\": container with ID starting with 8df7087ffa7d42bfb974a7362473b687dc3416eeaff639fde298475fb0a695ca not found: ID does not exist"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.565700 5002 scope.go:117] "RemoveContainer" containerID="cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5"
Dec 03 16:35:09 crc kubenswrapper[5002]: E1203 16:35:09.566133 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5\": container with ID starting with cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5 not found: ID does not exist" containerID="cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.566179 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5"} err="failed to get container status \"cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5\": rpc error: code = NotFound desc = could not find container \"cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5\": container with ID starting with cc57a81f4a8091f0bd5efbfd37f3320cd7d2fa69a3d05e21abb3187eaee220f5 not found: ID does not exist"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.566212 5002 scope.go:117] "RemoveContainer" containerID="b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a"
Dec 03 16:35:09 crc kubenswrapper[5002]: E1203 16:35:09.566467 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a\": container with ID starting with b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a not found: ID does not exist" containerID="b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.566493 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a"} err="failed to get container status \"b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a\": rpc error: code = NotFound desc = could not find container \"b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a\": container with ID starting with b9b6d9681818a24273cbb1fed595b86cee5a90001eaca92b48ce88da6538f27a not found: ID does not exist"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.572320 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7b6mw" podStartSLOduration=4.99376258 podStartE2EDuration="1m16.572291213s" podCreationTimestamp="2025-12-03 16:33:53 +0000 UTC" firstStartedPulling="2025-12-03 16:33:57.412787423 +0000 UTC m=+160.826609311" lastFinishedPulling="2025-12-03 16:35:08.991316046 +0000 UTC m=+232.405137944" observedRunningTime="2025-12-03 16:35:09.571699916 +0000 UTC m=+232.985521824" watchObservedRunningTime="2025-12-03 16:35:09.572291213 +0000 UTC m=+232.986113101"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.574465 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6jwj6" podStartSLOduration=7.188451232 podStartE2EDuration="1m15.574456122s" podCreationTimestamp="2025-12-03 16:33:54 +0000 UTC" firstStartedPulling="2025-12-03 16:33:57.438001179 +0000 UTC m=+160.851823067" lastFinishedPulling="2025-12-03 16:35:05.824006069 +0000 UTC m=+229.237827957" observedRunningTime="2025-12-03 16:35:09.550433522 +0000 UTC m=+232.964255430" watchObservedRunningTime="2025-12-03 16:35:09.574456122 +0000 UTC m=+232.988278010"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.615535 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9j4z7" podStartSLOduration=3.240496974 podStartE2EDuration="1m13.6155126s" podCreationTimestamp="2025-12-03 16:33:56 +0000 UTC" firstStartedPulling="2025-12-03 16:33:58.523039328 +0000 UTC m=+161.936861216" lastFinishedPulling="2025-12-03 16:35:08.898054944 +0000 UTC m=+232.311876842" observedRunningTime="2025-12-03 16:35:09.603797947 +0000 UTC m=+233.017619835" watchObservedRunningTime="2025-12-03 16:35:09.6155126 +0000 UTC m=+233.029334488"
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.618639 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scrwq"]
Dec 03 16:35:09 crc kubenswrapper[5002]: I1203 16:35:09.621963 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-scrwq"]
Dec 03 16:35:10 crc kubenswrapper[5002]: I1203 16:35:10.847499 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78b49351-215c-44ad-b9f3-13090f994617" path="/var/lib/kubelet/pods/78b49351-215c-44ad-b9f3-13090f994617/volumes"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.220234 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2rrfp"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.220885 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2rrfp"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.268998 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gtbgz"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.315274 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gtbgz"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.452233 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2rrfp"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.566502 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2rrfp"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.652275 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7b6mw"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.652346 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6jwj6"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.652367 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6jwj6"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.652379 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7b6mw"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.692986 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6jwj6"
Dec 03 16:35:14 crc kubenswrapper[5002]: I1203 16:35:14.693123 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7b6mw"
Dec 03 16:35:15 crc kubenswrapper[5002]: I1203 16:35:15.571921 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6jwj6"
Dec 03 16:35:15 crc kubenswrapper[5002]: I1203 16:35:15.576847 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7b6mw"
Dec 03 16:35:15 crc kubenswrapper[5002]: I1203 16:35:15.864386 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9wqft"
Dec 03 16:35:15 crc kubenswrapper[5002]: I1203 16:35:15.864779 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9wqft"
Dec 03 16:35:15 crc kubenswrapper[5002]: I1203 16:35:15.905138 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9wqft"
Dec 03 16:35:16 crc kubenswrapper[5002]: I1203 16:35:16.597929 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9wqft"
Dec 03 16:35:16 crc kubenswrapper[5002]: I1203 16:35:16.623887 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jwj6"]
Dec 03 16:35:16 crc kubenswrapper[5002]: I1203 16:35:16.888937 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9j4z7"
Dec 03 16:35:16 crc kubenswrapper[5002]: I1203 16:35:16.889002 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9j4z7"
Dec 03 16:35:16 crc kubenswrapper[5002]: I1203 16:35:16.935627 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9j4z7"
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.380271 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qkg9k"
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.380342 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qkg9k"
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.454658 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qkg9k"
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.542474 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6jwj6" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="registry-server" containerID="cri-o://df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a" gracePeriod=2
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.583482 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qkg9k"
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.598657 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9j4z7"
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.621721 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7b6mw"]
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.622060 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7b6mw" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="registry-server" containerID="cri-o://e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244" gracePeriod=2
Dec 03 16:35:17 crc kubenswrapper[5002]: I1203 16:35:17.910672 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jwj6"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.025198 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7b6mw"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.027164 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmpxg\" (UniqueName: \"kubernetes.io/projected/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-kube-api-access-rmpxg\") pod \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") "
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.027364 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-utilities\") pod \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") "
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.027398 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-catalog-content\") pod \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\" (UID: \"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0\") "
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.028313 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-utilities" (OuterVolumeSpecName: "utilities") pod "4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" (UID: "4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.034631 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-kube-api-access-rmpxg" (OuterVolumeSpecName: "kube-api-access-rmpxg") pod "4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" (UID: "4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0"). InnerVolumeSpecName "kube-api-access-rmpxg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.081451 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" (UID: "4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.129058 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-catalog-content\") pod \"889a250f-9618-48a9-b381-68056983907b\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") "
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.129179 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-utilities\") pod \"889a250f-9618-48a9-b381-68056983907b\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") "
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.129279 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqhz5\" (UniqueName: \"kubernetes.io/projected/889a250f-9618-48a9-b381-68056983907b-kube-api-access-fqhz5\") pod \"889a250f-9618-48a9-b381-68056983907b\" (UID: \"889a250f-9618-48a9-b381-68056983907b\") "
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.129542 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.129560 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.129575 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmpxg\" (UniqueName: \"kubernetes.io/projected/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0-kube-api-access-rmpxg\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.130081 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-utilities" (OuterVolumeSpecName: "utilities") pod "889a250f-9618-48a9-b381-68056983907b" (UID: "889a250f-9618-48a9-b381-68056983907b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.132382 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/889a250f-9618-48a9-b381-68056983907b-kube-api-access-fqhz5" (OuterVolumeSpecName: "kube-api-access-fqhz5") pod "889a250f-9618-48a9-b381-68056983907b" (UID: "889a250f-9618-48a9-b381-68056983907b"). InnerVolumeSpecName "kube-api-access-fqhz5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.174413 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "889a250f-9618-48a9-b381-68056983907b" (UID: "889a250f-9618-48a9-b381-68056983907b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.230625 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.230674 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqhz5\" (UniqueName: \"kubernetes.io/projected/889a250f-9618-48a9-b381-68056983907b-kube-api-access-fqhz5\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.230685 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/889a250f-9618-48a9-b381-68056983907b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.552607 5002 generic.go:334] "Generic (PLEG): container finished" podID="889a250f-9618-48a9-b381-68056983907b" containerID="e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244" exitCode=0
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.552718 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7b6mw"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.553084 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerDied","Data":"e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244"}
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.553188 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7b6mw" event={"ID":"889a250f-9618-48a9-b381-68056983907b","Type":"ContainerDied","Data":"ed1587b17747e9a5e408ab17289660363a136765ea7bed32ea28839ab38ea08e"}
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.553256 5002 scope.go:117] "RemoveContainer" containerID="e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.559452 5002 generic.go:334] "Generic (PLEG): container finished" podID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerID="df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a" exitCode=0
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.559516 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwj6" event={"ID":"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0","Type":"ContainerDied","Data":"df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a"}
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.559556 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jwj6"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.559596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jwj6" event={"ID":"4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0","Type":"ContainerDied","Data":"77a2eb025fa6f4df906cd063418f07311b2e27d3eab9ce5d2b8b458ce10fbcec"}
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.574672 5002 scope.go:117] "RemoveContainer" containerID="1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.597947 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7b6mw"]
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.602257 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7b6mw"]
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.612508 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jwj6"]
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.615414 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6jwj6"]
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.639380 5002 scope.go:117] "RemoveContainer" containerID="bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.655977 5002 scope.go:117] "RemoveContainer" containerID="e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244"
Dec 03 16:35:18 crc kubenswrapper[5002]: E1203 16:35:18.656296 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244\": container with ID starting with e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244 not found: ID does not exist" containerID="e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.656326 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244"} err="failed to get container status \"e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244\": rpc error: code = NotFound desc = could not find container \"e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244\": container with ID starting with e1296a362c9fcf02c8a90d71fc77cef031525f6a087e308c5c75da13b7f16244 not found: ID does not exist"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.656352 5002 scope.go:117] "RemoveContainer" containerID="1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b"
Dec 03 16:35:18 crc kubenswrapper[5002]: E1203 16:35:18.656556 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b\": container with ID starting with 1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b not found: ID does not exist" containerID="1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.656583 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b"} err="failed to get container status \"1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b\": rpc error: code = NotFound desc = could not find container \"1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b\": container with ID starting with 1d5113a55d3c7f8c006746c22bb4be1220985a478ca9b4b22e6132a41b1edf2b not found: ID does not exist"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.656594 5002 scope.go:117] "RemoveContainer" containerID="bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81"
Dec 03 16:35:18 crc kubenswrapper[5002]: E1203 16:35:18.657133 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81\": container with ID starting with bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81 not found: ID does not exist" containerID="bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.657156 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81"} err="failed to get container status \"bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81\": rpc error: code = NotFound desc = could not find container \"bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81\": container with ID starting with bbe4cebe7f1115c7264b3b73ffde8538fd9732418fc90283e8e9144280275b81 not found: ID does not exist"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.657167 5002 scope.go:117] "RemoveContainer" containerID="df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.702968 5002 scope.go:117] "RemoveContainer" containerID="8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.728231 5002 scope.go:117] "RemoveContainer" containerID="198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.748024 5002 scope.go:117] "RemoveContainer" containerID="df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a"
Dec 03 16:35:18 crc kubenswrapper[5002]: E1203 16:35:18.748553 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a\": container with ID starting with df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a not found: ID does not exist" containerID="df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.748587 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a"} err="failed to get container status \"df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a\": rpc error: code = NotFound desc = could not find container \"df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a\": container with ID starting with df601c5b7f99bf068fb0281ea6a9d52cbfe51b5109ef26402f896a8e0424551a not found: ID does not exist"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.748610 5002 scope.go:117] "RemoveContainer" containerID="8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2"
Dec 03 16:35:18 crc kubenswrapper[5002]: E1203 16:35:18.748958 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2\": container with ID starting with 8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2 not found: ID does not exist" containerID="8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.749016 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2"} err="failed to get container status \"8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2\": rpc error: code = NotFound desc = could not find container \"8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2\": container with ID starting with 8f23fd6a289fbcb20c0e9c14f761d7331170ee9857bc74ea94c79f569d8ba8c2 not found: ID does not exist"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.749050 5002 scope.go:117] "RemoveContainer" containerID="198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1"
Dec 03 16:35:18 crc kubenswrapper[5002]: E1203 16:35:18.749622 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1\": container with ID starting with 198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1 not found: ID does not exist" containerID="198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.749649 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1"} err="failed to get container status \"198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1\": rpc error: code = NotFound desc = could not find container \"198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1\": container with ID starting with 198d18fd1068ac020af43001265e34b16a2b98bd165d7ae8990e0e130cb006e1 not found: ID does not exist"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.848398 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" path="/var/lib/kubelet/pods/4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0/volumes"
Dec 03 16:35:18 crc kubenswrapper[5002]: I1203 16:35:18.849137 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="889a250f-9618-48a9-b381-68056983907b" path="/var/lib/kubelet/pods/889a250f-9618-48a9-b381-68056983907b/volumes"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.020682 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qkg9k"]
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.020937 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qkg9k" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="registry-server" containerID="cri-o://36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6" gracePeriod=2
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.442634 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qkg9k"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.505338 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-catalog-content\") pod \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") "
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.505466 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw6cg\" (UniqueName: \"kubernetes.io/projected/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-kube-api-access-vw6cg\") pod \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") "
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.505506 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-utilities\") pod \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\" (UID: \"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c\") "
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.506842 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-utilities" (OuterVolumeSpecName: "utilities") pod "b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" (UID: "b830fe22-e3e0-4de1-8aa4-8e16a3e9594c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.514975 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-kube-api-access-vw6cg" (OuterVolumeSpecName: "kube-api-access-vw6cg") pod "b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" (UID: "b830fe22-e3e0-4de1-8aa4-8e16a3e9594c"). InnerVolumeSpecName "kube-api-access-vw6cg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.595300 5002 generic.go:334] "Generic (PLEG): container finished" podID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerID="36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6" exitCode=0
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.595354 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerDied","Data":"36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6"}
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.595389 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qkg9k" event={"ID":"b830fe22-e3e0-4de1-8aa4-8e16a3e9594c","Type":"ContainerDied","Data":"26e15d370a82cbf9d3837aa3999e10d886771e7a4c3f4501876da4c270814024"}
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.595420 5002 scope.go:117] "RemoveContainer" containerID="36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.595567 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qkg9k"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.607491 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw6cg\" (UniqueName: \"kubernetes.io/projected/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-kube-api-access-vw6cg\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.607520 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.615269 5002 scope.go:117] "RemoveContainer" containerID="36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.635268 5002 scope.go:117] "RemoveContainer" containerID="2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.641707 5002 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642088 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642111 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642132 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642140 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642151 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642159 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642169 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642176 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642185 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e4fadf-2e10-4d33-81c0-b9bdeda0bf83" containerName="pruner"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642192 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e4fadf-2e10-4d33-81c0-b9bdeda0bf83" containerName="pruner"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642268 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642277 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642286 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642293 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642302 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642309 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642317 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642324 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642339 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642345 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="extract-utilities"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642354 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642360 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642372 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642379 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="extract-content"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.642388 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642393 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642611 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642623 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="70e4fadf-2e10-4d33-81c0-b9bdeda0bf83" containerName="pruner"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642630 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="889a250f-9618-48a9-b381-68056983907b" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642640 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="78b49351-215c-44ad-b9f3-13090f994617" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.642647 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bb8f125-ca9d-433c-9d3e-c9dfbdc702e0" containerName="registry-server"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643082 5002 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643239 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643622 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe" gracePeriod=15
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643648 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5" gracePeriod=15
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643676 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99" gracePeriod=15
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643798 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273" gracePeriod=15
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.643824 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4" gracePeriod=15
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644033 5002 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644210 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644230 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644242 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644250 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644263 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644271 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644278 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644287 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644305 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644313 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644324 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644332 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.644344 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644352 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644494 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644509 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644528 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644538 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644550 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.644806 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.659043 5002 scope.go:117] "RemoveContainer" containerID="36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.659112 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" (UID: "b830fe22-e3e0-4de1-8aa4-8e16a3e9594c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.659652 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6\": container with ID starting with 36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6 not found: ID does not exist" containerID="36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.659694 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6"} err="failed to get container status \"36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6\": rpc error: code = NotFound desc = could not find container \"36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6\": container with ID starting with 36457ef0708a95bab7e290c006f57e0d7fb65c3b42c34938643a23efaff7aad6 not found: ID does not exist"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.659721 5002 scope.go:117] "RemoveContainer" containerID="36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.667728 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56\": container with ID starting with 36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56 not found: ID does not exist" containerID="36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.667787 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56"} err="failed to get container status \"36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56\": rpc error: code = NotFound desc = could not find container \"36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56\": container with ID starting with 36fe06de203b171d7cbb8d5d688693725ac6c5da3802b32db40505f2dddd3a56 not found: ID does not exist"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.667813 5002 scope.go:117] "RemoveContainer" containerID="2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.676222 5002 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.155:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: E1203 16:35:21.677203 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9\": container with ID starting with 2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9 not found: ID does not exist" containerID="2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.677441 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9"} err="failed to get container status \"2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9\": rpc error: code = NotFound desc = could not find container \"2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9\": container with ID starting with 2bb6fb883dd4aa2bc30d4a43fb5c5602d8ae254db6575396683570a8175f7eb9 not found: ID does not exist"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708626 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708674 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708766 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708798 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708867 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708892 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708911 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.708953 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.810786 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.810900 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.810977 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811000 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811062 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811093 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811102 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811199 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811205 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811241 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811337 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811375 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811407 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.811374 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.921818 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.922338 5002 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:21 crc kubenswrapper[5002]: I1203 16:35:21.978071 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:35:22 crc kubenswrapper[5002]: W1203 16:35:22.002872 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-846af49b1798731806cbc09ec2eea220f2cacff206037b58f088eb42df2e6562 WatchSource:0}: Error finding container 846af49b1798731806cbc09ec2eea220f2cacff206037b58f088eb42df2e6562: Status 404 returned error can't find the container with id 846af49b1798731806cbc09ec2eea220f2cacff206037b58f088eb42df2e6562 Dec 03 16:35:22 crc kubenswrapper[5002]: E1203 16:35:22.007206 5002 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.155:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dc1ce95000f15 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:35:22.006335253 +0000 UTC m=+245.420157141,LastTimestamp:2025-12-03 16:35:22.006335253 +0000 UTC m=+245.420157141,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.604008 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.606584 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.607500 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5" exitCode=0 Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.607527 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273" exitCode=0 Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.607534 5002 
generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99" exitCode=0 Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.607541 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4" exitCode=2 Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.607582 5002 scope.go:117] "RemoveContainer" containerID="836ddf9e8a4dbfdb703a1ef65f3c07534afcb83e957d88b20352420d753101f1" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.610114 5002 generic.go:334] "Generic (PLEG): container finished" podID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" containerID="cbed3b7849d9231e9e074c6cf4b0b9cc74e7d566b12e5559dd4f6998dc60b127" exitCode=0 Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.610191 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c","Type":"ContainerDied","Data":"cbed3b7849d9231e9e074c6cf4b0b9cc74e7d566b12e5559dd4f6998dc60b127"} Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.610982 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.611322 5002 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.611556 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.611689 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"04a2b01a5bd89021c47b30f2d71524bd75d59863e10c7073a0150673a2112d64"} Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.611717 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"846af49b1798731806cbc09ec2eea220f2cacff206037b58f088eb42df2e6562"} Dec 03 16:35:22 crc kubenswrapper[5002]: E1203 16:35:22.612261 5002 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.155:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.612340 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.612736 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:22 crc kubenswrapper[5002]: I1203 16:35:22.613029 5002 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:23 crc kubenswrapper[5002]: I1203 16:35:23.625536 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 16:35:23 crc kubenswrapper[5002]: E1203 16:35:23.914934 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:23Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:23Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:23Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:23Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:23 crc kubenswrapper[5002]: E1203 16:35:23.916146 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:23 crc kubenswrapper[5002]: E1203 16:35:23.916513 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:23 crc kubenswrapper[5002]: E1203 16:35:23.916812 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:23 crc kubenswrapper[5002]: E1203 16:35:23.917029 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node 
\"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:23 crc kubenswrapper[5002]: E1203 16:35:23.917052 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.032814 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.033601 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.034070 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.039654 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.040574 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.041354 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.041809 5002 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.042348 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148312 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148405 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). 
InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148537 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kube-api-access\") pod \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148583 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-var-lock\") pod \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148622 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-var-lock" (OuterVolumeSpecName: "var-lock") pod "16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" (UID: "16c22cb6-76b9-45cf-bff8-5c82a4a54d0c"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148634 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148732 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148797 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148831 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kubelet-dir\") pod \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\" (UID: \"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c\") " Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.148855 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" (UID: "16c22cb6-76b9-45cf-bff8-5c82a4a54d0c"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.149114 5002 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.149133 5002 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.149142 5002 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.149150 5002 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.149159 5002 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.153683 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" (UID: "16c22cb6-76b9-45cf-bff8-5c82a4a54d0c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.251032 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/16c22cb6-76b9-45cf-bff8-5c82a4a54d0c-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.637292 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.638218 5002 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe" exitCode=0 Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.638306 5002 scope.go:117] "RemoveContainer" containerID="7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.638313 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.640365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"16c22cb6-76b9-45cf-bff8-5c82a4a54d0c","Type":"ContainerDied","Data":"a9a7aad6902507879ff015e7a77be3c58b160fb59bbe6feb8c19f6277852fb61"} Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.640393 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9a7aad6902507879ff015e7a77be3c58b160fb59bbe6feb8c19f6277852fb61" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.640493 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.654209 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.655283 5002 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.655646 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.665138 5002 scope.go:117] "RemoveContainer" containerID="4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.669659 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.670231 5002 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.671946 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.684151 5002 scope.go:117] "RemoveContainer" containerID="afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.701537 5002 
scope.go:117] "RemoveContainer" containerID="5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.720584 5002 scope.go:117] "RemoveContainer" containerID="bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.736024 5002 scope.go:117] "RemoveContainer" containerID="1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.775576 5002 scope.go:117] "RemoveContainer" containerID="7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5" Dec 03 16:35:24 crc kubenswrapper[5002]: E1203 16:35:24.776094 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\": container with ID starting with 7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5 not found: ID does not exist" containerID="7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.776456 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5"} err="failed to get container status \"7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\": rpc error: code = NotFound desc = could not find container \"7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5\": container with ID starting with 7816788b7ec9fd88e9d050159d04a29b8ffff5a1fd24117c65e00188256efeb5 not found: ID does not exist" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.776480 5002 scope.go:117] "RemoveContainer" containerID="4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273" Dec 03 16:35:24 crc kubenswrapper[5002]: E1203 16:35:24.777021 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\": container with ID starting with 4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273 not found: ID does not exist" containerID="4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.777077 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273"} err="failed to get container status \"4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\": rpc error: code = NotFound desc = could not find container \"4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273\": container with ID starting with 4995537558ecaebcc6f04dff08e3554b503045adfab0d7558598ed7348e29273 not found: ID does not exist" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.777112 5002 scope.go:117] "RemoveContainer" containerID="afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99" Dec 03 16:35:24 crc kubenswrapper[5002]: E1203 16:35:24.777388 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\": container with ID starting with afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99 not found: ID does not exist" 
containerID="afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.777411 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99"} err="failed to get container status \"afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\": rpc error: code = NotFound desc = could not find container \"afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99\": container with ID starting with afd8b12ef3051b4b23e485013966726ac1fabdc9f4cd3a6fe76de11912ad0e99 not found: ID does not exist" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.777425 5002 scope.go:117] "RemoveContainer" containerID="5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4" Dec 03 16:35:24 crc kubenswrapper[5002]: E1203 16:35:24.777642 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\": container with ID starting with 5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4 not found: ID does not exist" containerID="5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.777677 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4"} err="failed to get container status \"5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\": rpc error: code = NotFound desc = could not find container \"5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4\": container with ID starting with 5e0fdb411f5b6fee930a8acb72b85d7ab9c6128c69556fbb557cb4c13db122f4 not found: ID does not exist" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.777699 5002 scope.go:117] "RemoveContainer" containerID="bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe" Dec 03 16:35:24 crc kubenswrapper[5002]: E1203 16:35:24.778355 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\": container with ID starting with bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe not found: ID does not exist" containerID="bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.778375 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe"} err="failed to get container status \"bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\": rpc error: code = NotFound desc = could not find container \"bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe\": container with ID starting with bc033880a93d58b93adb3bf5bcfd6f61757de60e494c11f71e248b26212135fe not found: ID does not exist" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.778390 5002 scope.go:117] "RemoveContainer" containerID="1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df" Dec 03 16:35:24 crc kubenswrapper[5002]: E1203 16:35:24.778668 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\": container with ID starting with 1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df not found: ID does not exist" containerID="1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.778703 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df"} err="failed to get container status \"1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\": rpc error: code = NotFound desc = could not find container \"1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df\": container with ID starting with 1582806b2258246301a8e2d60143dfcb94c1f07af5fa61f4549d8efd51fd87df not found: ID does not exist" Dec 03 16:35:24 crc kubenswrapper[5002]: I1203 16:35:24.847194 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 03 16:35:26 crc kubenswrapper[5002]: I1203 16:35:26.842875 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:26 crc kubenswrapper[5002]: I1203 16:35:26.844611 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:28 crc kubenswrapper[5002]: I1203 16:35:28.851853 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" containerName="oauth-openshift" containerID="cri-o://2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e" gracePeriod=15 Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.342000 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.343305 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.343597 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.343911 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.430639 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-ocp-branding-template\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.430828 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-trusted-ca-bundle\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.430883 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-policies\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.430917 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-error\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.430955 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-provider-selection\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431003 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-router-certs\") pod 
\"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431032 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-dir\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431097 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6v2t\" (UniqueName: \"kubernetes.io/projected/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-kube-api-access-r6v2t\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431124 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-serving-cert\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431190 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-service-ca\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431237 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-login\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431264 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-cliconfig\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431299 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-session\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.431337 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-idp-0-file-data\") pod \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\" (UID: \"aa31951c-cf30-4f7f-aaf5-f1a1109f8869\") " Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.432195 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.432266 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.432280 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.432331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.432947 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.438131 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.438710 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.438876 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-kube-api-access-r6v2t" (OuterVolumeSpecName: "kube-api-access-r6v2t") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "kube-api-access-r6v2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.438986 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.439411 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.439678 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.440307 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.440519 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.440965 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "aa31951c-cf30-4f7f-aaf5-f1a1109f8869" (UID: "aa31951c-cf30-4f7f-aaf5-f1a1109f8869"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.498265 5002 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.498837 5002 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.499130 5002 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.499372 5002 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.499617 5002 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.499662 5002 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.499948 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="200ms" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533074 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533118 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533131 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533143 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533155 5002 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 
16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533171 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533183 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533213 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533227 5002 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533239 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6v2t\" (UniqueName: \"kubernetes.io/projected/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-kube-api-access-r6v2t\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533250 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533261 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533274 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.533287 5002 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa31951c-cf30-4f7f-aaf5-f1a1109f8869-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.677649 5002 generic.go:334] "Generic (PLEG): container finished" podID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" containerID="2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e" exitCode=0 Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.677714 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" event={"ID":"aa31951c-cf30-4f7f-aaf5-f1a1109f8869","Type":"ContainerDied","Data":"2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e"} Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.677782 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" 
event={"ID":"aa31951c-cf30-4f7f-aaf5-f1a1109f8869","Type":"ContainerDied","Data":"626e1cda810eb8cf831145d1919bae1ce1a49d498923d58ab48c2806823120ae"} Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.677806 5002 scope.go:117] "RemoveContainer" containerID="2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.677827 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.678715 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.679942 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.680469 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.698412 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.699376 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.699828 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.701264 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="400ms" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.709188 5002 scope.go:117] "RemoveContainer" containerID="2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e" Dec 03 16:35:29 crc kubenswrapper[5002]: E1203 16:35:29.709782 5002 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e\": container with ID starting with 2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e not found: ID does not exist" containerID="2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e" Dec 03 16:35:29 crc kubenswrapper[5002]: I1203 16:35:29.709839 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e"} err="failed to get container status \"2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e\": rpc error: code = NotFound desc = could not find container \"2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e\": container with ID starting with 2d8b4b42b81bef6b90150c3a02ec9236c1a2ffa12baa08d522491a27b179a74e not found: ID does not exist" Dec 03 16:35:30 crc kubenswrapper[5002]: E1203 16:35:30.102880 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="800ms" Dec 03 16:35:30 crc kubenswrapper[5002]: E1203 16:35:30.904100 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="1.6s" Dec 03 16:35:31 crc kubenswrapper[5002]: E1203 16:35:31.909500 5002 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.155:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dc1ce95000f15 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:35:22.006335253 +0000 UTC m=+245.420157141,LastTimestamp:2025-12-03 16:35:22.006335253 +0000 UTC m=+245.420157141,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 16:35:31 crc kubenswrapper[5002]: E1203 16:35:31.925666 5002 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.155:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" volumeName="registry-storage" Dec 03 16:35:32 crc kubenswrapper[5002]: E1203 16:35:32.505490 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="3.2s" Dec 03 16:35:34 crc kubenswrapper[5002]: E1203 16:35:34.148455 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:34Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:34Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:34Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:35:34Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:34 crc kubenswrapper[5002]: E1203 16:35:34.148980 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:34 crc kubenswrapper[5002]: E1203 16:35:34.149358 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:34 crc kubenswrapper[5002]: E1203 16:35:34.149822 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:34 crc kubenswrapper[5002]: E1203 16:35:34.150156 5002 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:34 crc kubenswrapper[5002]: E1203 16:35:34.150180 5002 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:35:35 crc kubenswrapper[5002]: E1203 16:35:35.707402 5002 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.155:6443: connect: connection refused" interval="6.4s" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.839971 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.841331 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.842951 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.843684 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.861504 5002 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.861563 5002 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:35 crc kubenswrapper[5002]: E1203 16:35:35.862418 5002 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:35 crc kubenswrapper[5002]: I1203 16:35:35.863008 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.728286 5002 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="810cf988c7aa45ab5d4ccedf59b33283cec841168f39aadb3c12819412f9c4db" exitCode=0 Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.728436 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"810cf988c7aa45ab5d4ccedf59b33283cec841168f39aadb3c12819412f9c4db"} Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.729028 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2ac1ab987a06259f311094fa4274350f2b39a94c830b40701f7284e9cb14b6d7"} Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.729579 5002 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.729612 5002 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.730282 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: E1203 16:35:36.730466 5002 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.730538 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.731012 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.733521 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.733591 5002 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108" exitCode=1 Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.733640 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108"} Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.734389 5002 scope.go:117] "RemoveContainer" containerID="ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.734879 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.735554 5002 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.735976 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.736426 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.844986 5002 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.845212 5002 status_manager.go:851] "Failed to get status for pod" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" pod="openshift-marketplace/redhat-operators-qkg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qkg9k\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.845353 5002 status_manager.go:851] "Failed to get status for pod" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" pod="openshift-authentication/oauth-openshift-558db77b4-cfssq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-cfssq\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.845498 5002 status_manager.go:851] "Failed to get status for pod" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:36 crc kubenswrapper[5002]: I1203 16:35:36.845677 5002 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.155:6443: connect: connection refused" Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.055362 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.759907 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bd96813a7b9ab3d3c28eac9f1c1d4635cc873398c71d60f34b8c5a28fb675f2f"} Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.760454 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"17106ba0e4348415552adb27bfa12ea51d50e883389f2abf8d3bd250fdc34aa3"} Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.760465 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ac44b256f28a2eff07c28d7f13ffc73c63079ca228dddb8d1dfe3f3895e673e6"} Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.760474 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d476f7d6027dbd86e93ae71f190e499b815d07e94e1a011d6b65ab057b01dc4c"} Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.766309 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 16:35:37 crc kubenswrapper[5002]: I1203 16:35:37.766349 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f7918d8982859ad0e2859e9c7a325c3e9ad81b31befd0eb6b3ef9deaa59e5f61"} Dec 03 16:35:38 crc kubenswrapper[5002]: I1203 16:35:38.780622 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"afbac8633a4ee757565aa073eb43c1104d9bf896a5a006618a0f7e0863a1cf9c"} Dec 03 16:35:38 crc kubenswrapper[5002]: I1203 16:35:38.782286 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:38 crc kubenswrapper[5002]: I1203 16:35:38.782723 5002 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:38 crc kubenswrapper[5002]: I1203 16:35:38.782971 5002 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:40 crc kubenswrapper[5002]: 
I1203 16:35:40.863911 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:40 crc kubenswrapper[5002]: I1203 16:35:40.864139 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:40 crc kubenswrapper[5002]: I1203 16:35:40.869295 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:43 crc kubenswrapper[5002]: I1203 16:35:43.890278 5002 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:35:43 crc kubenswrapper[5002]: I1203 16:35:43.996207 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="a7bfc659-6f57-4187-b150-d8a89c744ef0" Dec 03 16:35:44 crc kubenswrapper[5002]: I1203 16:35:44.816850 5002 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:44 crc kubenswrapper[5002]: I1203 16:35:44.816892 5002 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e" Dec 03 16:35:44 crc kubenswrapper[5002]: I1203 16:35:44.820862 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="a7bfc659-6f57-4187-b150-d8a89c744ef0" Dec 03 16:35:46 crc kubenswrapper[5002]: I1203 16:35:46.617050 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:35:46 crc kubenswrapper[5002]: I1203 16:35:46.617815 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 16:35:46 crc kubenswrapper[5002]: I1203 16:35:46.618057 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 16:35:47 crc kubenswrapper[5002]: I1203 16:35:47.056275 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:35:53 crc kubenswrapper[5002]: I1203 16:35:53.914854 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 16:35:54 crc kubenswrapper[5002]: I1203 16:35:54.272631 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 16:35:54 crc kubenswrapper[5002]: I1203 16:35:54.514016 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:35:54 crc kubenswrapper[5002]: I1203 16:35:54.639850 5002 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 16:35:54 crc kubenswrapper[5002]: I1203 16:35:54.709223 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 16:35:54 crc kubenswrapper[5002]: I1203 16:35:54.781047 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 16:35:54 crc kubenswrapper[5002]: I1203 16:35:54.867469 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.096593 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.111401 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.142469 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.258967 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.523385 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.538974 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.589865 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.745604 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.918260 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 16:35:55 crc kubenswrapper[5002]: I1203 16:35:55.983403 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.116938 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.212524 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.259996 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.323378 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.433197 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.479931 5002 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.574093 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.616667 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.617132 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.720908 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 16:35:56 crc kubenswrapper[5002]: I1203 16:35:56.775455 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.070160 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.101004 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.174014 5002 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.205478 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.278653 5002 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.302207 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.372962 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.706085 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.781579 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.802123 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.857462 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.861932 5002 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.891603 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 16:35:57 crc kubenswrapper[5002]: I1203 16:35:57.984522 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.150776 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.205797 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.335153 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.573247 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.680785 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.692561 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.701971 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.803103 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.804907 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.809875 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.887699 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.900284 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 16:35:58 crc kubenswrapper[5002]: I1203 16:35:58.940479 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.188926 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.219274 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.258574 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.269883 5002 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"audit-1" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.280779 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.307808 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.308990 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.322208 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.406411 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.408490 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.425843 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.545950 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.563142 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.564532 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.640852 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.710101 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.804970 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.862439 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.933278 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 16:35:59 crc kubenswrapper[5002]: I1203 16:35:59.971457 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.049222 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.055031 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.087254 5002 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.108596 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.122090 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.185017 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.211523 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.263202 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.386815 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.415670 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.516368 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.547473 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.666157 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.800155 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.863812 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.864003 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.906552 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 16:36:00 crc kubenswrapper[5002]: I1203 16:36:00.998372 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.131530 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.416304 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.434701 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.457667 5002 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.493967 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.506951 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.609511 5002 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.737366 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.743671 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.754549 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.815922 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 16:36:01 crc kubenswrapper[5002]: I1203 16:36:01.867609 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.031689 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.082457 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.233205 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.364484 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.382268 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.418190 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.451348 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.468406 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.470338 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.509587 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.565719 5002 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.677205 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.702383 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.837959 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.871695 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.906600 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:36:02 crc kubenswrapper[5002]: I1203 16:36:02.992313 5002 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.065847 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.116190 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.167576 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.214886 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.238605 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.299047 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.386137 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.424873 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.516720 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.604557 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.609266 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.619349 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.778507 5002 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.799246 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.914890 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.956819 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 16:36:03 crc kubenswrapper[5002]: I1203 16:36:03.991773 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.062959 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.077822 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.102765 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.136683 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.145014 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.145038 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.146298 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.212434 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.372950 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.387679 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.388211 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.426821 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.437264 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.456123 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.535775 5002 reflector.go:368] Caches populated 
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.601523 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.658512 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.684574 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.757840 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.776397 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.817224 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.829664 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.920916 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.957122 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 03 16:36:04 crc kubenswrapper[5002]: I1203 16:36:04.997976 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.089108 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.156459 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.245395 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.262503 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.267695 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.357407 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.361006 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.367689 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.534777 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.582132 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.677816 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.702823 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.719857 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.721669 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.787145 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.805965 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.854527 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.869803 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.947340 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.963922 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.970352 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 03 16:36:05 crc kubenswrapper[5002]: I1203 16:36:05.993715 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.101062 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.111398 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.159325 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.173028 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.398198 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.414754 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.431702 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.497247 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.501239 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.605735 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.616956 5002 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.617043 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.617127 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.618083 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"f7918d8982859ad0e2859e9c7a325c3e9ad81b31befd0eb6b3ef9deaa59e5f61"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.618267 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://f7918d8982859ad0e2859e9c7a325c3e9ad81b31befd0eb6b3ef9deaa59e5f61" gracePeriod=30
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.792172 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 03 16:36:06 crc kubenswrapper[5002]: I1203 16:36:06.897547 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.134329 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.278862 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.389146 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.423813 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.453058 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.481898 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.517302 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.529248 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.679657 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.717053 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.827950 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 03 16:36:07 crc kubenswrapper[5002]: I1203 16:36:07.947392 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.033450 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.045188 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.097393 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.201942 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.204946 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.230354 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.266951 5002 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.272496 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qkg9k","openshift-authentication/oauth-openshift-558db77b4-cfssq","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.272600 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-985c66b4-xmvlb","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:36:08 crc kubenswrapper[5002]: E1203 16:36:08.272872 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" containerName="oauth-openshift"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.272888 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" containerName="oauth-openshift"
Dec 03 16:36:08 crc kubenswrapper[5002]: E1203 16:36:08.272905 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" containerName="installer"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.272914 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" containerName="installer"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.273025 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" containerName="oauth-openshift"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.273046 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="16c22cb6-76b9-45cf-bff8-5c82a4a54d0c" containerName="installer"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.273082 5002 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.273131 5002 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b8e4c2a0-a478-4f07-a4c9-5f5e9411fe0e"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.273554 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.278146 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.278191 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.278493 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.278609 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.278663 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.278623 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.279003 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.279092 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.279235 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.279247 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.279371 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.279445 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.282832 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.285156 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.298146 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.298191 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.319963 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.321265 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.324685 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=25.324653057 podStartE2EDuration="25.324653057s" podCreationTimestamp="2025-12-03 16:35:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:36:08.305896331 +0000 UTC m=+291.719718239" watchObservedRunningTime="2025-12-03 16:36:08.324653057 +0000 UTC m=+291.738474945"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.398204 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.431828 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.431894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb"
Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.431914 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-router-certs\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb"
\"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-router-certs\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.431937 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-audit-policies\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.431964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-audit-dir\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.431988 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-session\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432014 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432031 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlqrp\" (UniqueName: \"kubernetes.io/projected/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-kube-api-access-vlqrp\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432050 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432066 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432093 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-service-ca\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432110 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-login\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432128 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.432144 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-error\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.450153 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.532115 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.532968 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlqrp\" (UniqueName: \"kubernetes.io/projected/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-kube-api-access-vlqrp\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533009 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533036 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533066 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-service-ca\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533087 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-login\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533104 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533121 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-error\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533164 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533182 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533199 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-router-certs\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533222 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-audit-policies\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533237 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-audit-dir\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533255 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-session\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.533298 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.534455 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-audit-dir\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.538334 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.538935 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-service-ca\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.539446 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.540265 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-audit-policies\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.541123 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.541317 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 
16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.541328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.541793 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.542315 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-error\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.543464 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-login\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.543697 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.544300 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.552237 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-session\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.552433 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-v4-0-config-system-router-certs\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.556297 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlqrp\" (UniqueName: \"kubernetes.io/projected/66564a1e-9b56-48bf-a2d8-b4b10ac267c9-kube-api-access-vlqrp\") pod \"oauth-openshift-985c66b4-xmvlb\" (UID: \"66564a1e-9b56-48bf-a2d8-b4b10ac267c9\") " pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.597692 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.708161 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.848643 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa31951c-cf30-4f7f-aaf5-f1a1109f8869" path="/var/lib/kubelet/pods/aa31951c-cf30-4f7f-aaf5-f1a1109f8869/volumes" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.849793 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b830fe22-e3e0-4de1-8aa4-8e16a3e9594c" path="/var/lib/kubelet/pods/b830fe22-e3e0-4de1-8aa4-8e16a3e9594c/volumes" Dec 03 16:36:08 crc kubenswrapper[5002]: I1203 16:36:08.981546 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.020945 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.032728 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-985c66b4-xmvlb"] Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.218627 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.220012 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.243734 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.315729 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.665644 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.690727 5002 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.704110 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.916625 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.984844 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" 
event={"ID":"66564a1e-9b56-48bf-a2d8-b4b10ac267c9","Type":"ContainerStarted","Data":"2ffb564d25d303f005a7752d1917f1486f13e51b2d4ecfcf33fbeaa2c96235a2"} Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.984921 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" event={"ID":"66564a1e-9b56-48bf-a2d8-b4b10ac267c9","Type":"ContainerStarted","Data":"f2440eef800687cb643926c8848ccae39ecd13a67a5825ed6a0f088c21d4d3f5"} Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.985420 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:09 crc kubenswrapper[5002]: I1203 16:36:09.992038 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" Dec 03 16:36:10 crc kubenswrapper[5002]: I1203 16:36:10.010305 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-985c66b4-xmvlb" podStartSLOduration=67.010282259 podStartE2EDuration="1m7.010282259s" podCreationTimestamp="2025-12-03 16:35:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:36:10.00557801 +0000 UTC m=+293.419399918" watchObservedRunningTime="2025-12-03 16:36:10.010282259 +0000 UTC m=+293.424104167" Dec 03 16:36:10 crc kubenswrapper[5002]: I1203 16:36:10.047579 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 16:36:10 crc kubenswrapper[5002]: I1203 16:36:10.402164 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 16:36:10 crc kubenswrapper[5002]: I1203 16:36:10.663241 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 16:36:16 crc kubenswrapper[5002]: I1203 16:36:16.707114 5002 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 03 16:36:16 crc kubenswrapper[5002]: I1203 16:36:16.721842 5002 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 16:36:16 crc kubenswrapper[5002]: I1203 16:36:16.722337 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://04a2b01a5bd89021c47b30f2d71524bd75d59863e10c7073a0150673a2112d64" gracePeriod=5 Dec 03 16:36:21 crc kubenswrapper[5002]: E1203 16:36:21.875280 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-04a2b01a5bd89021c47b30f2d71524bd75d59863e10c7073a0150673a2112d64.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.070162 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.070242 5002 generic.go:334] "Generic (PLEG): 
container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="04a2b01a5bd89021c47b30f2d71524bd75d59863e10c7073a0150673a2112d64" exitCode=137 Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.312490 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.312590 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460106 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460211 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460247 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460286 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460294 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460335 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460349 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460706 5002 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460723 5002 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460865 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.460911 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.469850 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.561880 5002 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.561922 5002 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.561932 5002 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:22 crc kubenswrapper[5002]: I1203 16:36:22.854077 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 03 16:36:23 crc kubenswrapper[5002]: I1203 16:36:23.078854 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 16:36:23 crc kubenswrapper[5002]: I1203 16:36:23.079205 5002 scope.go:117] "RemoveContainer" containerID="04a2b01a5bd89021c47b30f2d71524bd75d59863e10c7073a0150673a2112d64" Dec 03 16:36:23 crc kubenswrapper[5002]: I1203 16:36:23.079320 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:36:26 crc kubenswrapper[5002]: I1203 16:36:26.102873 5002 generic.go:334] "Generic (PLEG): container finished" podID="b1e5478b-5439-41eb-b83b-700e37123781" containerID="049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54" exitCode=0 Dec 03 16:36:26 crc kubenswrapper[5002]: I1203 16:36:26.102987 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" event={"ID":"b1e5478b-5439-41eb-b83b-700e37123781","Type":"ContainerDied","Data":"049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54"} Dec 03 16:36:26 crc kubenswrapper[5002]: I1203 16:36:26.104535 5002 scope.go:117] "RemoveContainer" containerID="049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54" Dec 03 16:36:27 crc kubenswrapper[5002]: I1203 16:36:27.113355 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" event={"ID":"b1e5478b-5439-41eb-b83b-700e37123781","Type":"ContainerStarted","Data":"804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9"} Dec 03 16:36:27 crc kubenswrapper[5002]: I1203 16:36:27.113992 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:36:27 crc kubenswrapper[5002]: I1203 16:36:27.116658 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:36:34 crc kubenswrapper[5002]: I1203 16:36:34.650731 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 16:36:37 crc kubenswrapper[5002]: I1203 16:36:37.174358 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 16:36:37 crc kubenswrapper[5002]: I1203 16:36:37.177361 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 16:36:37 crc kubenswrapper[5002]: I1203 16:36:37.177432 5002 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="f7918d8982859ad0e2859e9c7a325c3e9ad81b31befd0eb6b3ef9deaa59e5f61" exitCode=137 Dec 03 16:36:37 crc kubenswrapper[5002]: I1203 16:36:37.177485 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"f7918d8982859ad0e2859e9c7a325c3e9ad81b31befd0eb6b3ef9deaa59e5f61"} Dec 03 16:36:37 crc kubenswrapper[5002]: I1203 16:36:37.177576 5002 scope.go:117] "RemoveContainer" containerID="ecacdfa123de0dce06e2429196c727f1bf469636b9063135e8b2a9d21a375108" Dec 03 16:36:38 crc kubenswrapper[5002]: I1203 16:36:38.185767 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 16:36:38 crc kubenswrapper[5002]: I1203 16:36:38.188320 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b89b9613ab86893b8b3a387ad06761897eac7085214707624ad9b6aee36a31d0"} Dec 03 16:36:38 crc kubenswrapper[5002]: I1203 16:36:38.354714 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 16:36:46 crc kubenswrapper[5002]: I1203 16:36:46.616293 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:36:46 crc kubenswrapper[5002]: I1203 16:36:46.622227 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:36:47 crc kubenswrapper[5002]: I1203 16:36:47.055970 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:36:47 crc kubenswrapper[5002]: I1203 16:36:47.063265 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:36:55 crc kubenswrapper[5002]: I1203 16:36:55.407524 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ln6gt"] Dec 03 16:36:55 crc kubenswrapper[5002]: I1203 16:36:55.408821 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" podUID="a5977d33-9bdf-49f2-ba26-a3e2df8c4063" containerName="controller-manager" containerID="cri-o://ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f" gracePeriod=30 Dec 03 16:36:55 crc kubenswrapper[5002]: I1203 16:36:55.411156 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"] Dec 03 16:36:55 crc kubenswrapper[5002]: I1203 16:36:55.411456 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerName="route-controller-manager" containerID="cri-o://190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a" gracePeriod=30 Dec 03 16:36:55 crc kubenswrapper[5002]: I1203 16:36:55.862262 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:36:55 crc kubenswrapper[5002]: I1203 16:36:55.952437 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.025205 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-config\") pod \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.025345 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jldnw\" (UniqueName: \"kubernetes.io/projected/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-kube-api-access-jldnw\") pod \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.025426 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-serving-cert\") pod \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.025464 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-proxy-ca-bundles\") pod \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.025489 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-client-ca\") pod \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\" (UID: \"a5977d33-9bdf-49f2-ba26-a3e2df8c4063\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.026350 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-client-ca" (OuterVolumeSpecName: "client-ca") pod "a5977d33-9bdf-49f2-ba26-a3e2df8c4063" (UID: "a5977d33-9bdf-49f2-ba26-a3e2df8c4063"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.026428 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a5977d33-9bdf-49f2-ba26-a3e2df8c4063" (UID: "a5977d33-9bdf-49f2-ba26-a3e2df8c4063"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.026514 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-config" (OuterVolumeSpecName: "config") pod "a5977d33-9bdf-49f2-ba26-a3e2df8c4063" (UID: "a5977d33-9bdf-49f2-ba26-a3e2df8c4063"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.033826 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-kube-api-access-jldnw" (OuterVolumeSpecName: "kube-api-access-jldnw") pod "a5977d33-9bdf-49f2-ba26-a3e2df8c4063" (UID: "a5977d33-9bdf-49f2-ba26-a3e2df8c4063"). 
InnerVolumeSpecName "kube-api-access-jldnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.035198 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a5977d33-9bdf-49f2-ba26-a3e2df8c4063" (UID: "a5977d33-9bdf-49f2-ba26-a3e2df8c4063"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127123 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx24n\" (UniqueName: \"kubernetes.io/projected/3fad564d-947c-411c-b1e7-0e5d82ebb310-kube-api-access-cx24n\") pod \"3fad564d-947c-411c-b1e7-0e5d82ebb310\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127204 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-client-ca\") pod \"3fad564d-947c-411c-b1e7-0e5d82ebb310\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127238 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fad564d-947c-411c-b1e7-0e5d82ebb310-serving-cert\") pod \"3fad564d-947c-411c-b1e7-0e5d82ebb310\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127305 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-config\") pod \"3fad564d-947c-411c-b1e7-0e5d82ebb310\" (UID: \"3fad564d-947c-411c-b1e7-0e5d82ebb310\") " Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127666 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jldnw\" (UniqueName: \"kubernetes.io/projected/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-kube-api-access-jldnw\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127682 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127691 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127701 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.127713 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5977d33-9bdf-49f2-ba26-a3e2df8c4063-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.128182 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-client-ca" (OuterVolumeSpecName: "client-ca") pod "3fad564d-947c-411c-b1e7-0e5d82ebb310" (UID: 
"3fad564d-947c-411c-b1e7-0e5d82ebb310"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.128257 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-config" (OuterVolumeSpecName: "config") pod "3fad564d-947c-411c-b1e7-0e5d82ebb310" (UID: "3fad564d-947c-411c-b1e7-0e5d82ebb310"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.131401 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fad564d-947c-411c-b1e7-0e5d82ebb310-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3fad564d-947c-411c-b1e7-0e5d82ebb310" (UID: "3fad564d-947c-411c-b1e7-0e5d82ebb310"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.132105 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fad564d-947c-411c-b1e7-0e5d82ebb310-kube-api-access-cx24n" (OuterVolumeSpecName: "kube-api-access-cx24n") pod "3fad564d-947c-411c-b1e7-0e5d82ebb310" (UID: "3fad564d-947c-411c-b1e7-0e5d82ebb310"). InnerVolumeSpecName "kube-api-access-cx24n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.228617 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx24n\" (UniqueName: \"kubernetes.io/projected/3fad564d-947c-411c-b1e7-0e5d82ebb310-kube-api-access-cx24n\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.228655 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.228667 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fad564d-947c-411c-b1e7-0e5d82ebb310-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.228678 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fad564d-947c-411c-b1e7-0e5d82ebb310-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.305521 5002 generic.go:334] "Generic (PLEG): container finished" podID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerID="190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a" exitCode=0 Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.305617 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" event={"ID":"3fad564d-947c-411c-b1e7-0e5d82ebb310","Type":"ContainerDied","Data":"190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a"} Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.305633 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.305662 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq" event={"ID":"3fad564d-947c-411c-b1e7-0e5d82ebb310","Type":"ContainerDied","Data":"79e34a89a195a61ffac6c9c48ea12a628d151f7eea86e8230c776624561c9e04"} Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.305686 5002 scope.go:117] "RemoveContainer" containerID="190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.306958 5002 generic.go:334] "Generic (PLEG): container finished" podID="a5977d33-9bdf-49f2-ba26-a3e2df8c4063" containerID="ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f" exitCode=0 Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.307046 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" event={"ID":"a5977d33-9bdf-49f2-ba26-a3e2df8c4063","Type":"ContainerDied","Data":"ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f"} Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.307095 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" event={"ID":"a5977d33-9bdf-49f2-ba26-a3e2df8c4063","Type":"ContainerDied","Data":"4b40af3819f9fd03b59b81fa6b9bf13b547053bc69942a856ef2743c19974131"} Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.307103 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ln6gt" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.332547 5002 scope.go:117] "RemoveContainer" containerID="190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a" Dec 03 16:36:56 crc kubenswrapper[5002]: E1203 16:36:56.333388 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a\": container with ID starting with 190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a not found: ID does not exist" containerID="190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.333466 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a"} err="failed to get container status \"190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a\": rpc error: code = NotFound desc = could not find container \"190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a\": container with ID starting with 190ef4c077e7dfaa7d4582a7af8008a1419d3c168aaca949f7981e60a46ed95a not found: ID does not exist" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.333506 5002 scope.go:117] "RemoveContainer" containerID="ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.355312 5002 scope.go:117] "RemoveContainer" containerID="ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f" Dec 03 16:36:56 crc kubenswrapper[5002]: E1203 16:36:56.356124 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f\": container with ID starting with ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f not found: ID does not exist" containerID="ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.356168 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f"} err="failed to get container status \"ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f\": rpc error: code = NotFound desc = could not find container \"ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f\": container with ID starting with ea7e4e5886f92b3bca5527c79e5285da0eb816bc9c08af6005d20f7d47ee134f not found: ID does not exist" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.358694 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.364549 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5prhq"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.367986 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ln6gt"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.370884 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ln6gt"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780115 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"] Dec 03 16:36:56 crc kubenswrapper[5002]: E1203 16:36:56.780477 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780492 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 16:36:56 crc kubenswrapper[5002]: E1203 16:36:56.780502 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerName="route-controller-manager" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780509 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerName="route-controller-manager" Dec 03 16:36:56 crc kubenswrapper[5002]: E1203 16:36:56.780530 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5977d33-9bdf-49f2-ba26-a3e2df8c4063" containerName="controller-manager" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780537 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5977d33-9bdf-49f2-ba26-a3e2df8c4063" containerName="controller-manager" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780635 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5977d33-9bdf-49f2-ba26-a3e2df8c4063" containerName="controller-manager" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780651 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" containerName="route-controller-manager" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.780661 5002 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.781196 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.784024 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.784914 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.787738 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.788267 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.788689 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.789652 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.789901 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.790101 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.790261 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.790432 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.790966 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.791153 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.791293 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.793003 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.797116 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.803415 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.809080 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"] Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.837924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvk8m\" (UniqueName: \"kubernetes.io/projected/fbec4cc5-2873-4f93-aea5-13a34150d5e4-kube-api-access-gvk8m\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.837980 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-client-ca\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838003 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbec4cc5-2873-4f93-aea5-13a34150d5e4-serving-cert\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838050 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-config\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838085 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5031fc3-176e-45fe-8c26-7777fd68a9f3-serving-cert\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838113 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-client-ca\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838141 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-config\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj6jd\" (UniqueName: \"kubernetes.io/projected/d5031fc3-176e-45fe-8c26-7777fd68a9f3-kube-api-access-vj6jd\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: 
\"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.838229 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-proxy-ca-bundles\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.853926 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fad564d-947c-411c-b1e7-0e5d82ebb310" path="/var/lib/kubelet/pods/3fad564d-947c-411c-b1e7-0e5d82ebb310/volumes" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.854866 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5977d33-9bdf-49f2-ba26-a3e2df8c4063" path="/var/lib/kubelet/pods/a5977d33-9bdf-49f2-ba26-a3e2df8c4063/volumes" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939437 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5031fc3-176e-45fe-8c26-7777fd68a9f3-serving-cert\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939488 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-client-ca\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939518 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-config\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939586 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj6jd\" (UniqueName: \"kubernetes.io/projected/d5031fc3-176e-45fe-8c26-7777fd68a9f3-kube-api-access-vj6jd\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939636 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-proxy-ca-bundles\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939698 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvk8m\" (UniqueName: \"kubernetes.io/projected/fbec4cc5-2873-4f93-aea5-13a34150d5e4-kube-api-access-gvk8m\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: 
\"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939721 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-client-ca\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939739 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbec4cc5-2873-4f93-aea5-13a34150d5e4-serving-cert\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.939786 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-config\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.940945 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-client-ca\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.941029 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-config\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.941389 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-config\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.941570 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-client-ca\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.941655 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-proxy-ca-bundles\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.944592 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbec4cc5-2873-4f93-aea5-13a34150d5e4-serving-cert\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.945791 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5031fc3-176e-45fe-8c26-7777fd68a9f3-serving-cert\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.957862 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj6jd\" (UniqueName: \"kubernetes.io/projected/d5031fc3-176e-45fe-8c26-7777fd68a9f3-kube-api-access-vj6jd\") pod \"controller-manager-58c8c7d596-4xmgw\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") " pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:56 crc kubenswrapper[5002]: I1203 16:36:56.957906 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvk8m\" (UniqueName: \"kubernetes.io/projected/fbec4cc5-2873-4f93-aea5-13a34150d5e4-kube-api-access-gvk8m\") pod \"route-controller-manager-58464fc68b-lvmll\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") " pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:57 crc kubenswrapper[5002]: I1203 16:36:57.099173 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:57 crc kubenswrapper[5002]: I1203 16:36:57.111445 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:57 crc kubenswrapper[5002]: W1203 16:36:57.369283 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbec4cc5_2873_4f93_aea5_13a34150d5e4.slice/crio-5e7afa49e88976bf05ae46a0b823f8ba9694fea2200676e218fc0749ecdef0f7 WatchSource:0}: Error finding container 5e7afa49e88976bf05ae46a0b823f8ba9694fea2200676e218fc0749ecdef0f7: Status 404 returned error can't find the container with id 5e7afa49e88976bf05ae46a0b823f8ba9694fea2200676e218fc0749ecdef0f7 Dec 03 16:36:57 crc kubenswrapper[5002]: I1203 16:36:57.373662 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"] Dec 03 16:36:57 crc kubenswrapper[5002]: I1203 16:36:57.378474 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"] Dec 03 16:36:57 crc kubenswrapper[5002]: W1203 16:36:57.379474 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5031fc3_176e_45fe_8c26_7777fd68a9f3.slice/crio-c70ced1dbf443582a67078371c6fe5af92eda774368538f92c82fb826920f29d WatchSource:0}: Error finding container c70ced1dbf443582a67078371c6fe5af92eda774368538f92c82fb826920f29d: Status 404 returned error can't find the container with id c70ced1dbf443582a67078371c6fe5af92eda774368538f92c82fb826920f29d Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.325218 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" event={"ID":"fbec4cc5-2873-4f93-aea5-13a34150d5e4","Type":"ContainerStarted","Data":"6a42979dfab6aa0023435f91aea6adbe75526ceff810c8f4f5b657bc88825dab"} Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.325735 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" event={"ID":"fbec4cc5-2873-4f93-aea5-13a34150d5e4","Type":"ContainerStarted","Data":"5e7afa49e88976bf05ae46a0b823f8ba9694fea2200676e218fc0749ecdef0f7"} Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.325777 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.328771 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" event={"ID":"d5031fc3-176e-45fe-8c26-7777fd68a9f3","Type":"ContainerStarted","Data":"fc3d8a520d183435c178dc7b13638ed97649663913f344cc722f81987b6880e3"} Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.328803 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" event={"ID":"d5031fc3-176e-45fe-8c26-7777fd68a9f3","Type":"ContainerStarted","Data":"c70ced1dbf443582a67078371c6fe5af92eda774368538f92c82fb826920f29d"} Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.328995 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.330857 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.332980 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.342212 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" podStartSLOduration=3.342195576 podStartE2EDuration="3.342195576s" podCreationTimestamp="2025-12-03 16:36:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:36:58.340680253 +0000 UTC m=+341.754502141" watchObservedRunningTime="2025-12-03 16:36:58.342195576 +0000 UTC m=+341.756017464" Dec 03 16:36:58 crc kubenswrapper[5002]: I1203 16:36:58.377488 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" podStartSLOduration=3.377468965 podStartE2EDuration="3.377468965s" podCreationTimestamp="2025-12-03 16:36:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:36:58.37692097 +0000 UTC m=+341.790742848" watchObservedRunningTime="2025-12-03 16:36:58.377468965 +0000 UTC m=+341.791290853" Dec 03 16:37:20 crc kubenswrapper[5002]: I1203 16:37:20.917313 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:37:20 crc kubenswrapper[5002]: I1203 16:37:20.918427 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.340277 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-grml5"] Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.341559 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.358724 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-grml5"] Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471008 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5e8b2c5b-b862-4161-97a0-a267fd936a0f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471084 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-registry-tls\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471164 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv8t7\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-kube-api-access-nv8t7\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471272 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5e8b2c5b-b862-4161-97a0-a267fd936a0f-registry-certificates\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5e8b2c5b-b862-4161-97a0-a267fd936a0f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471414 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e8b2c5b-b862-4161-97a0-a267fd936a0f-trusted-ca\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.471496 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-bound-sa-token\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.499972 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572253 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5e8b2c5b-b862-4161-97a0-a267fd936a0f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572687 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-registry-tls\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv8t7\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-kube-api-access-nv8t7\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5e8b2c5b-b862-4161-97a0-a267fd936a0f-registry-certificates\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572801 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e8b2c5b-b862-4161-97a0-a267fd936a0f-trusted-ca\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572829 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5e8b2c5b-b862-4161-97a0-a267fd936a0f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572855 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5e8b2c5b-b862-4161-97a0-a267fd936a0f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.572880 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-bound-sa-token\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.574609 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5e8b2c5b-b862-4161-97a0-a267fd936a0f-registry-certificates\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.575039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e8b2c5b-b862-4161-97a0-a267fd936a0f-trusted-ca\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.579657 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-registry-tls\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.579905 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5e8b2c5b-b862-4161-97a0-a267fd936a0f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.592307 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-bound-sa-token\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.594429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv8t7\" (UniqueName: \"kubernetes.io/projected/5e8b2c5b-b862-4161-97a0-a267fd936a0f-kube-api-access-nv8t7\") pod \"image-registry-66df7c8f76-grml5\" (UID: \"5e8b2c5b-b862-4161-97a0-a267fd936a0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:24 crc kubenswrapper[5002]: I1203 16:37:24.660424 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:25 crc kubenswrapper[5002]: I1203 16:37:25.138083 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-grml5"] Dec 03 16:37:25 crc kubenswrapper[5002]: I1203 16:37:25.492596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-grml5" event={"ID":"5e8b2c5b-b862-4161-97a0-a267fd936a0f","Type":"ContainerStarted","Data":"afc61c879a95852af771a8dd27a20365405b22da6a970eaba7f0b02ac8eb2702"} Dec 03 16:37:25 crc kubenswrapper[5002]: I1203 16:37:25.492651 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-grml5" event={"ID":"5e8b2c5b-b862-4161-97a0-a267fd936a0f","Type":"ContainerStarted","Data":"42d912abc53301ee1e77b43766456446af6158054408abcfccb430f76037ca4e"} Dec 03 16:37:25 crc kubenswrapper[5002]: I1203 16:37:25.492877 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-grml5" Dec 03 16:37:25 crc kubenswrapper[5002]: I1203 16:37:25.515976 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-grml5" podStartSLOduration=1.515944993 podStartE2EDuration="1.515944993s" podCreationTimestamp="2025-12-03 16:37:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:37:25.515181023 +0000 UTC m=+368.929002901" watchObservedRunningTime="2025-12-03 16:37:25.515944993 +0000 UTC m=+368.929766881" Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.162901 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"] Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.164041 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" podUID="d5031fc3-176e-45fe-8c26-7777fd68a9f3" containerName="controller-manager" containerID="cri-o://fc3d8a520d183435c178dc7b13638ed97649663913f344cc722f81987b6880e3" gracePeriod=30 Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.309104 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"] Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.309457 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" podUID="fbec4cc5-2873-4f93-aea5-13a34150d5e4" containerName="route-controller-manager" containerID="cri-o://6a42979dfab6aa0023435f91aea6adbe75526ceff810c8f4f5b657bc88825dab" gracePeriod=30 Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.558612 5002 generic.go:334] "Generic (PLEG): container finished" podID="d5031fc3-176e-45fe-8c26-7777fd68a9f3" containerID="fc3d8a520d183435c178dc7b13638ed97649663913f344cc722f81987b6880e3" exitCode=0 Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.558925 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" event={"ID":"d5031fc3-176e-45fe-8c26-7777fd68a9f3","Type":"ContainerDied","Data":"fc3d8a520d183435c178dc7b13638ed97649663913f344cc722f81987b6880e3"} Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 
16:37:35.561497 5002 generic.go:334] "Generic (PLEG): container finished" podID="fbec4cc5-2873-4f93-aea5-13a34150d5e4" containerID="6a42979dfab6aa0023435f91aea6adbe75526ceff810c8f4f5b657bc88825dab" exitCode=0
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.561558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" event={"ID":"fbec4cc5-2873-4f93-aea5-13a34150d5e4","Type":"ContainerDied","Data":"6a42979dfab6aa0023435f91aea6adbe75526ceff810c8f4f5b657bc88825dab"}
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.603049 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.667972 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776049 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbec4cc5-2873-4f93-aea5-13a34150d5e4-serving-cert\") pod \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776114 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-client-ca\") pod \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776176 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-proxy-ca-bundles\") pod \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776216 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvk8m\" (UniqueName: \"kubernetes.io/projected/fbec4cc5-2873-4f93-aea5-13a34150d5e4-kube-api-access-gvk8m\") pod \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776290 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vj6jd\" (UniqueName: \"kubernetes.io/projected/d5031fc3-176e-45fe-8c26-7777fd68a9f3-kube-api-access-vj6jd\") pod \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776319 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5031fc3-176e-45fe-8c26-7777fd68a9f3-serving-cert\") pod \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776374 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-config\") pod \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\" (UID: \"d5031fc3-176e-45fe-8c26-7777fd68a9f3\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776399 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-config\") pod \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.776471 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-client-ca\") pod \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\" (UID: \"fbec4cc5-2873-4f93-aea5-13a34150d5e4\") "
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.777355 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-client-ca" (OuterVolumeSpecName: "client-ca") pod "fbec4cc5-2873-4f93-aea5-13a34150d5e4" (UID: "fbec4cc5-2873-4f93-aea5-13a34150d5e4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.777552 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-client-ca" (OuterVolumeSpecName: "client-ca") pod "d5031fc3-176e-45fe-8c26-7777fd68a9f3" (UID: "d5031fc3-176e-45fe-8c26-7777fd68a9f3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.777549 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d5031fc3-176e-45fe-8c26-7777fd68a9f3" (UID: "d5031fc3-176e-45fe-8c26-7777fd68a9f3"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.777643 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-config" (OuterVolumeSpecName: "config") pod "fbec4cc5-2873-4f93-aea5-13a34150d5e4" (UID: "fbec4cc5-2873-4f93-aea5-13a34150d5e4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.777807 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-config" (OuterVolumeSpecName: "config") pod "d5031fc3-176e-45fe-8c26-7777fd68a9f3" (UID: "d5031fc3-176e-45fe-8c26-7777fd68a9f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.782777 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbec4cc5-2873-4f93-aea5-13a34150d5e4-kube-api-access-gvk8m" (OuterVolumeSpecName: "kube-api-access-gvk8m") pod "fbec4cc5-2873-4f93-aea5-13a34150d5e4" (UID: "fbec4cc5-2873-4f93-aea5-13a34150d5e4"). InnerVolumeSpecName "kube-api-access-gvk8m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.782871 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5031fc3-176e-45fe-8c26-7777fd68a9f3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d5031fc3-176e-45fe-8c26-7777fd68a9f3" (UID: "d5031fc3-176e-45fe-8c26-7777fd68a9f3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.782878 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5031fc3-176e-45fe-8c26-7777fd68a9f3-kube-api-access-vj6jd" (OuterVolumeSpecName: "kube-api-access-vj6jd") pod "d5031fc3-176e-45fe-8c26-7777fd68a9f3" (UID: "d5031fc3-176e-45fe-8c26-7777fd68a9f3"). InnerVolumeSpecName "kube-api-access-vj6jd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.783297 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbec4cc5-2873-4f93-aea5-13a34150d5e4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fbec4cc5-2873-4f93-aea5-13a34150d5e4" (UID: "fbec4cc5-2873-4f93-aea5-13a34150d5e4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878449 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878505 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbec4cc5-2873-4f93-aea5-13a34150d5e4-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878527 5002 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878550 5002 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878598 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvk8m\" (UniqueName: \"kubernetes.io/projected/fbec4cc5-2873-4f93-aea5-13a34150d5e4-kube-api-access-gvk8m\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878622 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vj6jd\" (UniqueName: \"kubernetes.io/projected/d5031fc3-176e-45fe-8c26-7777fd68a9f3-kube-api-access-vj6jd\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878638 5002 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5031fc3-176e-45fe-8c26-7777fd68a9f3-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878672 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5031fc3-176e-45fe-8c26-7777fd68a9f3-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:35 crc kubenswrapper[5002]: I1203 16:37:35.878704 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbec4cc5-2873-4f93-aea5-13a34150d5e4-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.568521 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw" event={"ID":"d5031fc3-176e-45fe-8c26-7777fd68a9f3","Type":"ContainerDied","Data":"c70ced1dbf443582a67078371c6fe5af92eda774368538f92c82fb826920f29d"}
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.568589 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.568612 5002 scope.go:117] "RemoveContainer" containerID="fc3d8a520d183435c178dc7b13638ed97649663913f344cc722f81987b6880e3"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.571343 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll" event={"ID":"fbec4cc5-2873-4f93-aea5-13a34150d5e4","Type":"ContainerDied","Data":"5e7afa49e88976bf05ae46a0b823f8ba9694fea2200676e218fc0749ecdef0f7"}
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.571420 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.587293 5002 scope.go:117] "RemoveContainer" containerID="6a42979dfab6aa0023435f91aea6adbe75526ceff810c8f4f5b657bc88825dab"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.604906 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.610640 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58464fc68b-lvmll"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.616273 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.622637 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-58c8c7d596-4xmgw"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.810579 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"]
Dec 03 16:37:36 crc kubenswrapper[5002]: E1203 16:37:36.811295 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5031fc3-176e-45fe-8c26-7777fd68a9f3" containerName="controller-manager"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.811317 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5031fc3-176e-45fe-8c26-7777fd68a9f3" containerName="controller-manager"
Dec 03 16:37:36 crc kubenswrapper[5002]: E1203 16:37:36.811333 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbec4cc5-2873-4f93-aea5-13a34150d5e4" containerName="route-controller-manager"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.811341 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbec4cc5-2873-4f93-aea5-13a34150d5e4" containerName="route-controller-manager"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.811461 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbec4cc5-2873-4f93-aea5-13a34150d5e4" containerName="route-controller-manager"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.811477 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5031fc3-176e-45fe-8c26-7777fd68a9f3" containerName="controller-manager"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.812042 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.813956 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.814495 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.814568 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.814582 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.814587 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.815012 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.823979 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.825028 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.828116 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.828244 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.828427 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.828687 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.828829 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.829017 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.831132 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.835671 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.838368 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"]
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.848535 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5031fc3-176e-45fe-8c26-7777fd68a9f3" path="/var/lib/kubelet/pods/d5031fc3-176e-45fe-8c26-7777fd68a9f3/volumes"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.849142 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbec4cc5-2873-4f93-aea5-13a34150d5e4" path="/var/lib/kubelet/pods/fbec4cc5-2873-4f93-aea5-13a34150d5e4/volumes"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.993868 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-client-ca\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.993944 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr9gp\" (UniqueName: \"kubernetes.io/projected/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-kube-api-access-tr9gp\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.994580 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zbpw\" (UniqueName: \"kubernetes.io/projected/bc0d3e90-616f-457a-8653-b168b0fdef54-kube-api-access-2zbpw\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.994645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-proxy-ca-bundles\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.994864 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-serving-cert\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.995017 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0d3e90-616f-457a-8653-b168b0fdef54-config\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.995153 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-config\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.995214 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc0d3e90-616f-457a-8653-b168b0fdef54-serving-cert\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:36 crc kubenswrapper[5002]: I1203 16:37:36.995264 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc0d3e90-616f-457a-8653-b168b0fdef54-client-ca\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096341 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-serving-cert\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096408 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0d3e90-616f-457a-8653-b168b0fdef54-config\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-config\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096483 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc0d3e90-616f-457a-8653-b168b0fdef54-serving-cert\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096506 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc0d3e90-616f-457a-8653-b168b0fdef54-client-ca\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096556 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-client-ca\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr9gp\" (UniqueName: \"kubernetes.io/projected/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-kube-api-access-tr9gp\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096627 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zbpw\" (UniqueName: \"kubernetes.io/projected/bc0d3e90-616f-457a-8653-b168b0fdef54-kube-api-access-2zbpw\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.096654 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-proxy-ca-bundles\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.098281 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-proxy-ca-bundles\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.098319 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-client-ca\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.098478 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc0d3e90-616f-457a-8653-b168b0fdef54-client-ca\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.099515 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0d3e90-616f-457a-8653-b168b0fdef54-config\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.100568 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-config\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.107298 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc0d3e90-616f-457a-8653-b168b0fdef54-serving-cert\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.110421 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-serving-cert\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.116083 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr9gp\" (UniqueName: \"kubernetes.io/projected/1379a02c-d97e-43ff-99b0-22ebe8d0f6a8-kube-api-access-tr9gp\") pod \"controller-manager-5f4675d97d-hwjjl\" (UID: \"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8\") " pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.122377 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zbpw\" (UniqueName: \"kubernetes.io/projected/bc0d3e90-616f-457a-8653-b168b0fdef54-kube-api-access-2zbpw\") pod \"route-controller-manager-75bc4c5ffd-5sbh9\" (UID: \"bc0d3e90-616f-457a-8653-b168b0fdef54\") " pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.135703 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.153263 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.357484 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"]
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.402381 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"]
Dec 03 16:37:37 crc kubenswrapper[5002]: W1203 16:37:37.406488 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1379a02c_d97e_43ff_99b0_22ebe8d0f6a8.slice/crio-f3b9b304367d3d61bf581188e9bc946c5b035c284b6a0332d5fafe3dfdfbd556 WatchSource:0}: Error finding container f3b9b304367d3d61bf581188e9bc946c5b035c284b6a0332d5fafe3dfdfbd556: Status 404 returned error can't find the container with id f3b9b304367d3d61bf581188e9bc946c5b035c284b6a0332d5fafe3dfdfbd556
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.579248 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl" event={"ID":"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8","Type":"ContainerStarted","Data":"0fe880158f9033bca3c08420af60477a0cb72fff876d934b45d9afcc289b3935"}
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.581194 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl" event={"ID":"1379a02c-d97e-43ff-99b0-22ebe8d0f6a8","Type":"ContainerStarted","Data":"f3b9b304367d3d61bf581188e9bc946c5b035c284b6a0332d5fafe3dfdfbd556"}
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.581381 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.583260 5002 patch_prober.go:28] interesting pod/controller-manager-5f4675d97d-hwjjl container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body=
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.583422 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl" podUID="1379a02c-d97e-43ff-99b0-22ebe8d0f6a8" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.585706 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9" event={"ID":"bc0d3e90-616f-457a-8653-b168b0fdef54","Type":"ContainerStarted","Data":"99c5b1f05b5b69325b50bf36384f6290bbc609f351c79c5fc39833133f290ba4"}
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.585777 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9" event={"ID":"bc0d3e90-616f-457a-8653-b168b0fdef54","Type":"ContainerStarted","Data":"8490259c78f7b7f248a8c9bba706adbed97a57d47c7b15af10ec72c28d27e082"}
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.586242 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.588229 5002 patch_prober.go:28] interesting pod/route-controller-manager-75bc4c5ffd-5sbh9 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body=
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.588403 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9" podUID="bc0d3e90-616f-457a-8653-b168b0fdef54" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.606148 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl" podStartSLOduration=2.606122876 podStartE2EDuration="2.606122876s" podCreationTimestamp="2025-12-03 16:37:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:37:37.600344838 +0000 UTC m=+381.014166726" watchObservedRunningTime="2025-12-03 16:37:37.606122876 +0000 UTC m=+381.019944764"
Dec 03 16:37:37 crc kubenswrapper[5002]: I1203 16:37:37.637794 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9" podStartSLOduration=2.637758927 podStartE2EDuration="2.637758927s" podCreationTimestamp="2025-12-03 16:37:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:37:37.633435568 +0000 UTC m=+381.047257466" watchObservedRunningTime="2025-12-03 16:37:37.637758927 +0000 UTC m=+381.051580815"
Dec 03 16:37:38 crc kubenswrapper[5002]: I1203 16:37:38.598153 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-75bc4c5ffd-5sbh9"
Dec 03 16:37:38 crc kubenswrapper[5002]: I1203 16:37:38.599214 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5f4675d97d-hwjjl"
Dec 03 16:37:44 crc kubenswrapper[5002]: I1203 16:37:44.667600 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-grml5"
Dec 03 16:37:44 crc kubenswrapper[5002]: I1203 16:37:44.741044 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6qpdx"]
Dec 03 16:37:50 crc kubenswrapper[5002]: I1203 16:37:50.916779 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:37:50 crc kubenswrapper[5002]: I1203 16:37:50.917770 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.629097 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2rrfp"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.650052 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtbgz"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.650491 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gtbgz" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="registry-server" containerID="cri-o://8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130" gracePeriod=30
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.653586 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qqqkv"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.653915 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" containerID="cri-o://804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9" gracePeriod=30
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.665101 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9wqft"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.665505 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9wqft" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="registry-server" containerID="cri-o://d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961" gracePeriod=30
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.681073 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4qv6m"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.682085 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.687247 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9j4z7"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.688177 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9j4z7" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="registry-server" containerID="cri-o://abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" gracePeriod=30
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.705140 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2rrfp" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="registry-server" containerID="cri-o://2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc" gracePeriod=30
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.741970 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4qv6m"]
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.766742 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7wh7\" (UniqueName: \"kubernetes.io/projected/01e55b47-d730-453c-a240-6e6aeda14dc1-kube-api-access-q7wh7\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.766909 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/01e55b47-d730-453c-a240-6e6aeda14dc1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.766970 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/01e55b47-d730-453c-a240-6e6aeda14dc1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.868277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/01e55b47-d730-453c-a240-6e6aeda14dc1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.868351 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7wh7\" (UniqueName: \"kubernetes.io/projected/01e55b47-d730-453c-a240-6e6aeda14dc1-kube-api-access-q7wh7\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.868409 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/01e55b47-d730-453c-a240-6e6aeda14dc1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.871453 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/01e55b47-d730-453c-a240-6e6aeda14dc1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.882185 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/01e55b47-d730-453c-a240-6e6aeda14dc1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:56 crc kubenswrapper[5002]: E1203 16:37:56.895467 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51 is running failed: container process not found" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" cmd=["grpc_health_probe","-addr=:50051"]
Dec 03 16:37:56 crc kubenswrapper[5002]: E1203 16:37:56.896283 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51 is running failed: container process not found" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" cmd=["grpc_health_probe","-addr=:50051"]
Dec 03 16:37:56 crc kubenswrapper[5002]: E1203 16:37:56.896887 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51 is running failed: container process not found" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" cmd=["grpc_health_probe","-addr=:50051"]
Dec 03 16:37:56 crc kubenswrapper[5002]: E1203 16:37:56.897085 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-9j4z7" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="registry-server"
Dec 03 16:37:56 crc kubenswrapper[5002]: I1203 16:37:56.898817 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7wh7\" (UniqueName: \"kubernetes.io/projected/01e55b47-d730-453c-a240-6e6aeda14dc1-kube-api-access-q7wh7\") pod \"marketplace-operator-79b997595-4qv6m\" (UID: \"01e55b47-d730-453c-a240-6e6aeda14dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.009052 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.185316 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.276436 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-operator-metrics\") pod \"b1e5478b-5439-41eb-b83b-700e37123781\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.276495 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-trusted-ca\") pod \"b1e5478b-5439-41eb-b83b-700e37123781\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.276625 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drz44\" (UniqueName: \"kubernetes.io/projected/b1e5478b-5439-41eb-b83b-700e37123781-kube-api-access-drz44\") pod \"b1e5478b-5439-41eb-b83b-700e37123781\" (UID: \"b1e5478b-5439-41eb-b83b-700e37123781\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.278446 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b1e5478b-5439-41eb-b83b-700e37123781" (UID: "b1e5478b-5439-41eb-b83b-700e37123781"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.287558 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1e5478b-5439-41eb-b83b-700e37123781-kube-api-access-drz44" (OuterVolumeSpecName: "kube-api-access-drz44") pod "b1e5478b-5439-41eb-b83b-700e37123781" (UID: "b1e5478b-5439-41eb-b83b-700e37123781"). InnerVolumeSpecName "kube-api-access-drz44". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.290434 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b1e5478b-5439-41eb-b83b-700e37123781" (UID: "b1e5478b-5439-41eb-b83b-700e37123781"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.379346 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.379390 5002 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b1e5478b-5439-41eb-b83b-700e37123781-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.379404 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drz44\" (UniqueName: \"kubernetes.io/projected/b1e5478b-5439-41eb-b83b-700e37123781-kube-api-access-drz44\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.394124 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9wqft"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.411914 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9j4z7"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.451159 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2rrfp"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.472220 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtbgz"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480317 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-utilities\") pod \"04e2999b-f568-479e-b811-44a0d9082524\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480357 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-utilities\") pod \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480389 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq9c9\" (UniqueName: \"kubernetes.io/projected/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-kube-api-access-fq9c9\") pod \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480435 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-catalog-content\") pod \"04e2999b-f568-479e-b811-44a0d9082524\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480462 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-catalog-content\") pod \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\" (UID: \"6b80fc07-401e-4ddd-8a97-c66cc66d68b5\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480606 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-utilities\") pod \"cc2070de-fd9c-401b-9978-70c2fe35c939\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480670 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-catalog-content\") pod \"cc2070de-fd9c-401b-9978-70c2fe35c939\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480757 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tn5c\" (UniqueName: \"kubernetes.io/projected/cc2070de-fd9c-401b-9978-70c2fe35c939-kube-api-access-2tn5c\") pod \"cc2070de-fd9c-401b-9978-70c2fe35c939\" (UID: \"cc2070de-fd9c-401b-9978-70c2fe35c939\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.480792 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ppcw\" (UniqueName: \"kubernetes.io/projected/04e2999b-f568-479e-b811-44a0d9082524-kube-api-access-2ppcw\") pod \"04e2999b-f568-479e-b811-44a0d9082524\" (UID: \"04e2999b-f568-479e-b811-44a0d9082524\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.482964 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-utilities" (OuterVolumeSpecName: "utilities") pod "cc2070de-fd9c-401b-9978-70c2fe35c939" (UID: "cc2070de-fd9c-401b-9978-70c2fe35c939"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.483088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-utilities" (OuterVolumeSpecName: "utilities") pod "04e2999b-f568-479e-b811-44a0d9082524" (UID: "04e2999b-f568-479e-b811-44a0d9082524"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.483840 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-utilities" (OuterVolumeSpecName: "utilities") pod "6b80fc07-401e-4ddd-8a97-c66cc66d68b5" (UID: "6b80fc07-401e-4ddd-8a97-c66cc66d68b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.485699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-kube-api-access-fq9c9" (OuterVolumeSpecName: "kube-api-access-fq9c9") pod "6b80fc07-401e-4ddd-8a97-c66cc66d68b5" (UID: "6b80fc07-401e-4ddd-8a97-c66cc66d68b5"). InnerVolumeSpecName "kube-api-access-fq9c9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.487516 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc2070de-fd9c-401b-9978-70c2fe35c939-kube-api-access-2tn5c" (OuterVolumeSpecName: "kube-api-access-2tn5c") pod "cc2070de-fd9c-401b-9978-70c2fe35c939" (UID: "cc2070de-fd9c-401b-9978-70c2fe35c939"). InnerVolumeSpecName "kube-api-access-2tn5c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.490464 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04e2999b-f568-479e-b811-44a0d9082524-kube-api-access-2ppcw" (OuterVolumeSpecName: "kube-api-access-2ppcw") pod "04e2999b-f568-479e-b811-44a0d9082524" (UID: "04e2999b-f568-479e-b811-44a0d9082524"). InnerVolumeSpecName "kube-api-access-2ppcw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.527016 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04e2999b-f568-479e-b811-44a0d9082524" (UID: "04e2999b-f568-479e-b811-44a0d9082524"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.565857 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc2070de-fd9c-401b-9978-70c2fe35c939" (UID: "cc2070de-fd9c-401b-9978-70c2fe35c939"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582311 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/772ffd36-1d82-4493-96bd-09b67515116f-kube-api-access-hthnm\") pod \"772ffd36-1d82-4493-96bd-09b67515116f\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582438 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-utilities\") pod \"772ffd36-1d82-4493-96bd-09b67515116f\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582467 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-catalog-content\") pod \"772ffd36-1d82-4493-96bd-09b67515116f\" (UID: \"772ffd36-1d82-4493-96bd-09b67515116f\") "
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582868 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582887 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc2070de-fd9c-401b-9978-70c2fe35c939-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582900 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tn5c\" (UniqueName: \"kubernetes.io/projected/cc2070de-fd9c-401b-9978-70c2fe35c939-kube-api-access-2tn5c\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582912 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ppcw\" (UniqueName: \"kubernetes.io/projected/04e2999b-f568-479e-b811-44a0d9082524-kube-api-access-2ppcw\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582923 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582932 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582942 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq9c9\" (UniqueName: \"kubernetes.io/projected/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-kube-api-access-fq9c9\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.582951 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04e2999b-f568-479e-b811-44a0d9082524-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.583869 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-utilities" (OuterVolumeSpecName: "utilities") pod "772ffd36-1d82-4493-96bd-09b67515116f" (UID: "772ffd36-1d82-4493-96bd-09b67515116f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.587677 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/772ffd36-1d82-4493-96bd-09b67515116f-kube-api-access-hthnm" (OuterVolumeSpecName: "kube-api-access-hthnm") pod "772ffd36-1d82-4493-96bd-09b67515116f" (UID: "772ffd36-1d82-4493-96bd-09b67515116f"). InnerVolumeSpecName "kube-api-access-hthnm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.620786 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b80fc07-401e-4ddd-8a97-c66cc66d68b5" (UID: "6b80fc07-401e-4ddd-8a97-c66cc66d68b5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.635893 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "772ffd36-1d82-4493-96bd-09b67515116f" (UID: "772ffd36-1d82-4493-96bd-09b67515116f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.686488 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/772ffd36-1d82-4493-96bd-09b67515116f-kube-api-access-hthnm\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.686526 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.686538 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/772ffd36-1d82-4493-96bd-09b67515116f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.686547 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b80fc07-401e-4ddd-8a97-c66cc66d68b5-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.694428 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4qv6m"]
Dec 03 16:37:57 crc kubenswrapper[5002]: W1203 16:37:57.697081 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01e55b47_d730_453c_a240_6e6aeda14dc1.slice/crio-d6a9e3522a00caa9a6a7b779d7e6c4026aba3cbd0816d27b92b28071972ab2a3 WatchSource:0}: Error finding container d6a9e3522a00caa9a6a7b779d7e6c4026aba3cbd0816d27b92b28071972ab2a3: Status 404 returned error can't find the container with id d6a9e3522a00caa9a6a7b779d7e6c4026aba3cbd0816d27b92b28071972ab2a3
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.718311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m" event={"ID":"01e55b47-d730-453c-a240-6e6aeda14dc1","Type":"ContainerStarted","Data":"d6a9e3522a00caa9a6a7b779d7e6c4026aba3cbd0816d27b92b28071972ab2a3"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.721680 5002 generic.go:334] "Generic (PLEG): container finished" podID="04e2999b-f568-479e-b811-44a0d9082524" containerID="d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961" exitCode=0
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.721803 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9wqft"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.721805 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9wqft" event={"ID":"04e2999b-f568-479e-b811-44a0d9082524","Type":"ContainerDied","Data":"d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.721888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9wqft" event={"ID":"04e2999b-f568-479e-b811-44a0d9082524","Type":"ContainerDied","Data":"4408883e9aa3eaa8e999476466d114661f14e4b31a633c351edf59ebfeb974ed"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.721964 5002 scope.go:117] "RemoveContainer" containerID="d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.725325 5002 generic.go:334] "Generic (PLEG): container finished" podID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerID="2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc" exitCode=0
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.725452 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rrfp" event={"ID":"cc2070de-fd9c-401b-9978-70c2fe35c939","Type":"ContainerDied","Data":"2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.725487 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rrfp" event={"ID":"cc2070de-fd9c-401b-9978-70c2fe35c939","Type":"ContainerDied","Data":"34bb937eb63138831452d2f8273dd5a742962f0e38b19b2c9d1d895a0810727e"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.725926 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2rrfp"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.729599 5002 generic.go:334] "Generic (PLEG): container finished" podID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" exitCode=0
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.729673 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerDied","Data":"abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.729697 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9j4z7" event={"ID":"6b80fc07-401e-4ddd-8a97-c66cc66d68b5","Type":"ContainerDied","Data":"2c5de96f49b54d7af2b0b59a42d2445e9eeb0294f153e53bcad9effedeed8028"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.729817 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9j4z7"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.738147 5002 generic.go:334] "Generic (PLEG): container finished" podID="772ffd36-1d82-4493-96bd-09b67515116f" containerID="8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130" exitCode=0
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.738221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerDied","Data":"8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.738254 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtbgz" event={"ID":"772ffd36-1d82-4493-96bd-09b67515116f","Type":"ContainerDied","Data":"3bfa92427e500a65aefd2c058bdd0bf1df474cc79cde31478b51d23a66f496ed"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.738322 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtbgz"
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.744494 5002 generic.go:334] "Generic (PLEG): container finished" podID="b1e5478b-5439-41eb-b83b-700e37123781" containerID="804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9" exitCode=0
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.744710 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" event={"ID":"b1e5478b-5439-41eb-b83b-700e37123781","Type":"ContainerDied","Data":"804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.744849 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" event={"ID":"b1e5478b-5439-41eb-b83b-700e37123781","Type":"ContainerDied","Data":"c4541bed5b4eb57bc8d45d184c28461b45c8af21b22e4cb6161037f26c108309"}
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.745028 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qqqkv" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.746973 5002 scope.go:117] "RemoveContainer" containerID="7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.769721 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9wqft"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.771891 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9wqft"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.789948 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2rrfp"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.795687 5002 scope.go:117] "RemoveContainer" containerID="836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.801046 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2rrfp"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.808695 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9j4z7"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.809815 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9j4z7"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.822407 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtbgz"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.825615 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gtbgz"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.834759 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qqqkv"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.839667 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qqqkv"] Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.853719 5002 scope.go:117] "RemoveContainer" containerID="d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.854342 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961\": container with ID starting with d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961 not found: ID does not exist" containerID="d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.854402 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961"} err="failed to get container status \"d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961\": rpc error: code = NotFound desc = could not find container \"d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961\": container with ID starting with d9d22fb07bfdf316dc0c730d8ef1c3218483208abb65338bce7c3e9d3af62961 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.854442 5002 scope.go:117] "RemoveContainer" 
containerID="7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.854861 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2\": container with ID starting with 7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2 not found: ID does not exist" containerID="7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.854909 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2"} err="failed to get container status \"7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2\": rpc error: code = NotFound desc = could not find container \"7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2\": container with ID starting with 7af431e7f8ba1d1ed3b406f384846ad8018f34ba3a09a7cb9242fc0ca88367b2 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.854942 5002 scope.go:117] "RemoveContainer" containerID="836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.855338 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb\": container with ID starting with 836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb not found: ID does not exist" containerID="836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.855418 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb"} err="failed to get container status \"836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb\": rpc error: code = NotFound desc = could not find container \"836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb\": container with ID starting with 836da8d2251401e5210972881944254de5598534bdfd6c393c3f5c7cb95445bb not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.855472 5002 scope.go:117] "RemoveContainer" containerID="2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.886857 5002 scope.go:117] "RemoveContainer" containerID="744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.902736 5002 scope.go:117] "RemoveContainer" containerID="dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.929268 5002 scope.go:117] "RemoveContainer" containerID="2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.930057 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc\": container with ID starting with 2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc not found: ID does not exist" containerID="2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc" 
Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.930154 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc"} err="failed to get container status \"2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc\": rpc error: code = NotFound desc = could not find container \"2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc\": container with ID starting with 2984dfa7af8b03073833ddc9265d624282bfd92d2aa58d3f84a46f3a5657efcc not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.930212 5002 scope.go:117] "RemoveContainer" containerID="744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.930843 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820\": container with ID starting with 744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820 not found: ID does not exist" containerID="744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.930887 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820"} err="failed to get container status \"744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820\": rpc error: code = NotFound desc = could not find container \"744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820\": container with ID starting with 744bf1aae9e1658d5f61e833c17778997490bdfa53dc9b1899d90fcb6523e820 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.930918 5002 scope.go:117] "RemoveContainer" containerID="dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.931420 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7\": container with ID starting with dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7 not found: ID does not exist" containerID="dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.931578 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7"} err="failed to get container status \"dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7\": rpc error: code = NotFound desc = could not find container \"dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7\": container with ID starting with dff97e580cf61ec2b05caf0a22b49b51e04db910a425a5e0c3017af244494ad7 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.931728 5002 scope.go:117] "RemoveContainer" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.949604 5002 scope.go:117] "RemoveContainer" containerID="0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.974948 5002 scope.go:117] "RemoveContainer" 
containerID="1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.991999 5002 scope.go:117] "RemoveContainer" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.992786 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51\": container with ID starting with abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51 not found: ID does not exist" containerID="abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.992823 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51"} err="failed to get container status \"abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51\": rpc error: code = NotFound desc = could not find container \"abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51\": container with ID starting with abeed85bc765b2b708db27c5c267c4a6c46fe1065bb24e1107332cc137d15a51 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.992853 5002 scope.go:117] "RemoveContainer" containerID="0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.993199 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3\": container with ID starting with 0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3 not found: ID does not exist" containerID="0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.993250 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3"} err="failed to get container status \"0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3\": rpc error: code = NotFound desc = could not find container \"0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3\": container with ID starting with 0c1057970f9718f6b12ecebd0479ea20715b3391aa8439020886bc41f36391c3 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.993293 5002 scope.go:117] "RemoveContainer" containerID="1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0" Dec 03 16:37:57 crc kubenswrapper[5002]: E1203 16:37:57.993609 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0\": container with ID starting with 1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0 not found: ID does not exist" containerID="1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.993636 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0"} err="failed to get container status \"1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0\": rpc error: code = 
NotFound desc = could not find container \"1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0\": container with ID starting with 1ba60d80dcc6f03a13c22f47219efc706abc545da1a047961c9700b8ad7251c0 not found: ID does not exist" Dec 03 16:37:57 crc kubenswrapper[5002]: I1203 16:37:57.993651 5002 scope.go:117] "RemoveContainer" containerID="8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.010538 5002 scope.go:117] "RemoveContainer" containerID="f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.027447 5002 scope.go:117] "RemoveContainer" containerID="db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.043786 5002 scope.go:117] "RemoveContainer" containerID="8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.044659 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130\": container with ID starting with 8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130 not found: ID does not exist" containerID="8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.044696 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130"} err="failed to get container status \"8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130\": rpc error: code = NotFound desc = could not find container \"8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130\": container with ID starting with 8fd0fd1fb6f321bfb6cfb80def07597664a5e06b642af798f8a8e9fc0a530130 not found: ID does not exist" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.044727 5002 scope.go:117] "RemoveContainer" containerID="f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.045233 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3\": container with ID starting with f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3 not found: ID does not exist" containerID="f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.045268 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3"} err="failed to get container status \"f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3\": rpc error: code = NotFound desc = could not find container \"f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3\": container with ID starting with f32204220e5f80511041ea4f9cc9b531f31b783e5f8c721051269bfd5f418cd3 not found: ID does not exist" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.045289 5002 scope.go:117] "RemoveContainer" containerID="db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.045833 5002 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f\": container with ID starting with db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f not found: ID does not exist" containerID="db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.045914 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f"} err="failed to get container status \"db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f\": rpc error: code = NotFound desc = could not find container \"db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f\": container with ID starting with db25800c655b387ce24dd1427bda79b42b6fdc9a0d3c6df12b71abcf1913aa0f not found: ID does not exist" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.045967 5002 scope.go:117] "RemoveContainer" containerID="804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.061244 5002 scope.go:117] "RemoveContainer" containerID="049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.082535 5002 scope.go:117] "RemoveContainer" containerID="804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.083376 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9\": container with ID starting with 804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9 not found: ID does not exist" containerID="804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.083443 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9"} err="failed to get container status \"804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9\": rpc error: code = NotFound desc = could not find container \"804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9\": container with ID starting with 804b65f60ba0648a67254f279143f480fdb604f9650ccf98a3002bdc75a80de9 not found: ID does not exist" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.083494 5002 scope.go:117] "RemoveContainer" containerID="049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.084160 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54\": container with ID starting with 049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54 not found: ID does not exist" containerID="049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.084207 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54"} err="failed to get container status \"049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54\": rpc error: code = NotFound desc = could not 
find container \"049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54\": container with ID starting with 049a1b0db258bc76ac21808b2132b92ed635ae06ed4733ac276b727e7f94fb54 not found: ID does not exist" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.765015 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m" event={"ID":"01e55b47-d730-453c-a240-6e6aeda14dc1","Type":"ContainerStarted","Data":"720deccd1f0922f2c2e737c8a19b67ed8123b9ca1a2138b5d72f7ea3f6b369c3"} Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.765530 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.771288 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.795204 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-4qv6m" podStartSLOduration=2.7951811319999997 podStartE2EDuration="2.795181132s" podCreationTimestamp="2025-12-03 16:37:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:37:58.790032151 +0000 UTC m=+402.203854079" watchObservedRunningTime="2025-12-03 16:37:58.795181132 +0000 UTC m=+402.209003030" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.856816 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04e2999b-f568-479e-b811-44a0d9082524" path="/var/lib/kubelet/pods/04e2999b-f568-479e-b811-44a0d9082524/volumes" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.857577 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" path="/var/lib/kubelet/pods/6b80fc07-401e-4ddd-8a97-c66cc66d68b5/volumes" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.860461 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="772ffd36-1d82-4493-96bd-09b67515116f" path="/var/lib/kubelet/pods/772ffd36-1d82-4493-96bd-09b67515116f/volumes" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.861655 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1e5478b-5439-41eb-b83b-700e37123781" path="/var/lib/kubelet/pods/b1e5478b-5439-41eb-b83b-700e37123781/volumes" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.862209 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" path="/var/lib/kubelet/pods/cc2070de-fd9c-401b-9978-70c2fe35c939/volumes" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863237 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nd4c5"] Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863432 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863453 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863467 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" 
containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863475 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863489 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863498 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863509 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863516 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863525 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863532 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863543 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863550 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863560 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863567 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863577 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863583 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863618 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863629 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863640 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863649 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863661 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863670 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863701 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863711 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863724 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863732 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="extract-utilities" Dec 03 16:37:58 crc kubenswrapper[5002]: E1203 16:37:58.863765 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863772 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="extract-content" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863886 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b80fc07-401e-4ddd-8a97-c66cc66d68b5" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863900 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863908 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="772ffd36-1d82-4493-96bd-09b67515116f" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863917 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="04e2999b-f568-479e-b811-44a0d9082524" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863926 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1e5478b-5439-41eb-b83b-700e37123781" containerName="marketplace-operator" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.863934 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc2070de-fd9c-401b-9978-70c2fe35c939" containerName="registry-server" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.864757 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.866846 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd4c5"] Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.872925 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.909344 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d47103-19b4-4321-b9b3-45eff71e42ea-utilities\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.909657 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d47103-19b4-4321-b9b3-45eff71e42ea-catalog-content\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:58 crc kubenswrapper[5002]: I1203 16:37:58.909989 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckcsc\" (UniqueName: \"kubernetes.io/projected/51d47103-19b4-4321-b9b3-45eff71e42ea-kube-api-access-ckcsc\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.012031 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckcsc\" (UniqueName: \"kubernetes.io/projected/51d47103-19b4-4321-b9b3-45eff71e42ea-kube-api-access-ckcsc\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.012160 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d47103-19b4-4321-b9b3-45eff71e42ea-utilities\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.012218 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d47103-19b4-4321-b9b3-45eff71e42ea-catalog-content\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.012900 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d47103-19b4-4321-b9b3-45eff71e42ea-catalog-content\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.013252 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d47103-19b4-4321-b9b3-45eff71e42ea-utilities\") pod \"redhat-marketplace-nd4c5\" (UID: 
\"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.036947 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckcsc\" (UniqueName: \"kubernetes.io/projected/51d47103-19b4-4321-b9b3-45eff71e42ea-kube-api-access-ckcsc\") pod \"redhat-marketplace-nd4c5\" (UID: \"51d47103-19b4-4321-b9b3-45eff71e42ea\") " pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.060355 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vjcrp"] Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.061705 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.063969 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.066030 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vjcrp"] Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.114304 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46mtq\" (UniqueName: \"kubernetes.io/projected/14775129-525c-4d0d-9ba5-d28b6066c8ba-kube-api-access-46mtq\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.114362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14775129-525c-4d0d-9ba5-d28b6066c8ba-utilities\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.114418 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14775129-525c-4d0d-9ba5-d28b6066c8ba-catalog-content\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.186586 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.218441 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46mtq\" (UniqueName: \"kubernetes.io/projected/14775129-525c-4d0d-9ba5-d28b6066c8ba-kube-api-access-46mtq\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.218508 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14775129-525c-4d0d-9ba5-d28b6066c8ba-utilities\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.218581 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14775129-525c-4d0d-9ba5-d28b6066c8ba-catalog-content\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.219239 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14775129-525c-4d0d-9ba5-d28b6066c8ba-utilities\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.219321 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14775129-525c-4d0d-9ba5-d28b6066c8ba-catalog-content\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.241574 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46mtq\" (UniqueName: \"kubernetes.io/projected/14775129-525c-4d0d-9ba5-d28b6066c8ba-kube-api-access-46mtq\") pod \"redhat-operators-vjcrp\" (UID: \"14775129-525c-4d0d-9ba5-d28b6066c8ba\") " pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.385375 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.606062 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd4c5"] Dec 03 16:37:59 crc kubenswrapper[5002]: W1203 16:37:59.614254 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51d47103_19b4_4321_b9b3_45eff71e42ea.slice/crio-6be52943ea5472d7b26856c837e22406193a98ae7f9a6434d24758239abaff90 WatchSource:0}: Error finding container 6be52943ea5472d7b26856c837e22406193a98ae7f9a6434d24758239abaff90: Status 404 returned error can't find the container with id 6be52943ea5472d7b26856c837e22406193a98ae7f9a6434d24758239abaff90 Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.783842 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd4c5" event={"ID":"51d47103-19b4-4321-b9b3-45eff71e42ea","Type":"ContainerStarted","Data":"b81b0ab08566c9a4e1bf1509a3253a541592f14a01a97ba963199a409b584edb"} Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.783889 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd4c5" event={"ID":"51d47103-19b4-4321-b9b3-45eff71e42ea","Type":"ContainerStarted","Data":"6be52943ea5472d7b26856c837e22406193a98ae7f9a6434d24758239abaff90"} Dec 03 16:37:59 crc kubenswrapper[5002]: I1203 16:37:59.813854 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vjcrp"] Dec 03 16:37:59 crc kubenswrapper[5002]: W1203 16:37:59.843964 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14775129_525c_4d0d_9ba5_d28b6066c8ba.slice/crio-ceb7c4c7a0a7864d56efb95fc1d6d379db6ac6abc64d50c4d7c3e484e67e6e7f WatchSource:0}: Error finding container ceb7c4c7a0a7864d56efb95fc1d6d379db6ac6abc64d50c4d7c3e484e67e6e7f: Status 404 returned error can't find the container with id ceb7c4c7a0a7864d56efb95fc1d6d379db6ac6abc64d50c4d7c3e484e67e6e7f Dec 03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.792831 5002 generic.go:334] "Generic (PLEG): container finished" podID="14775129-525c-4d0d-9ba5-d28b6066c8ba" containerID="d68a578c21bf07a9d0cc0dc158bde95c463df261d28b189c0e46f34ed767dfe2" exitCode=0 Dec 03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.792887 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjcrp" event={"ID":"14775129-525c-4d0d-9ba5-d28b6066c8ba","Type":"ContainerDied","Data":"d68a578c21bf07a9d0cc0dc158bde95c463df261d28b189c0e46f34ed767dfe2"} Dec 03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.792941 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjcrp" event={"ID":"14775129-525c-4d0d-9ba5-d28b6066c8ba","Type":"ContainerStarted","Data":"ceb7c4c7a0a7864d56efb95fc1d6d379db6ac6abc64d50c4d7c3e484e67e6e7f"} Dec 03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.795407 5002 generic.go:334] "Generic (PLEG): container finished" podID="51d47103-19b4-4321-b9b3-45eff71e42ea" containerID="b81b0ab08566c9a4e1bf1509a3253a541592f14a01a97ba963199a409b584edb" exitCode=0 Dec 03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.795429 5002 generic.go:334] "Generic (PLEG): container finished" podID="51d47103-19b4-4321-b9b3-45eff71e42ea" containerID="d8c6c200f9ce7269d343b8e3377c4655c876eeaa39dc05bd20927c8b09ecf4ae" exitCode=0 Dec 
03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.795532 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd4c5" event={"ID":"51d47103-19b4-4321-b9b3-45eff71e42ea","Type":"ContainerDied","Data":"b81b0ab08566c9a4e1bf1509a3253a541592f14a01a97ba963199a409b584edb"} Dec 03 16:38:00 crc kubenswrapper[5002]: I1203 16:38:00.795604 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd4c5" event={"ID":"51d47103-19b4-4321-b9b3-45eff71e42ea","Type":"ContainerDied","Data":"d8c6c200f9ce7269d343b8e3377c4655c876eeaa39dc05bd20927c8b09ecf4ae"} Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.255529 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6l48r"] Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.265220 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.266496 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6l48r"] Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.268105 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.350531 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-utilities\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.350618 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq889\" (UniqueName: \"kubernetes.io/projected/f1403248-15f1-4aa0-afba-bc2e29f01886-kube-api-access-rq889\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.350699 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-catalog-content\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.451427 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l74ff"] Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.452590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-catalog-content\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.452726 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-utilities\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " 
pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.452769 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.452790 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq889\" (UniqueName: \"kubernetes.io/projected/f1403248-15f1-4aa0-afba-bc2e29f01886-kube-api-access-rq889\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.454074 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-catalog-content\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.454240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-utilities\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.458409 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.461522 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l74ff"] Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.481841 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq889\" (UniqueName: \"kubernetes.io/projected/f1403248-15f1-4aa0-afba-bc2e29f01886-kube-api-access-rq889\") pod \"community-operators-6l48r\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.554210 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-utilities\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.554592 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrgqf\" (UniqueName: \"kubernetes.io/projected/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-kube-api-access-lrgqf\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.554657 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-catalog-content\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.602723 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.655187 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrgqf\" (UniqueName: \"kubernetes.io/projected/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-kube-api-access-lrgqf\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.655232 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-utilities\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.655269 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-catalog-content\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.655700 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-catalog-content\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.655929 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-utilities\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.682120 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrgqf\" (UniqueName: \"kubernetes.io/projected/9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad-kube-api-access-lrgqf\") pod \"certified-operators-l74ff\" (UID: \"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad\") " pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.772995 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.805153 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjcrp" event={"ID":"14775129-525c-4d0d-9ba5-d28b6066c8ba","Type":"ContainerStarted","Data":"0204d16e81fa37d57bd9c7894f53f94c199ac49185dc0d5ed9f88b13e9b27294"} Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.807324 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd4c5" event={"ID":"51d47103-19b4-4321-b9b3-45eff71e42ea","Type":"ContainerStarted","Data":"bed23ad285ebcf853c3ef0344474cfcea53b2ac73c51b95c165982cbf18cb7b9"} Dec 03 16:38:01 crc kubenswrapper[5002]: I1203 16:38:01.857061 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nd4c5" podStartSLOduration=2.425979823 podStartE2EDuration="3.857042482s" podCreationTimestamp="2025-12-03 16:37:58 +0000 UTC" firstStartedPulling="2025-12-03 16:37:59.785566079 +0000 UTC m=+403.199387967" lastFinishedPulling="2025-12-03 16:38:01.216628738 +0000 UTC m=+404.630450626" observedRunningTime="2025-12-03 16:38:01.855946252 +0000 UTC m=+405.269768170" watchObservedRunningTime="2025-12-03 16:38:01.857042482 +0000 UTC m=+405.270864370" Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.044711 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6l48r"] Dec 03 16:38:02 crc kubenswrapper[5002]: W1203 16:38:02.048789 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1403248_15f1_4aa0_afba_bc2e29f01886.slice/crio-68559925817ad32e21f62ce727ff51f9b045a0b1f0d4504eae7a99905df96c1e WatchSource:0}: Error finding container 68559925817ad32e21f62ce727ff51f9b045a0b1f0d4504eae7a99905df96c1e: Status 404 returned error can't find the container with id 68559925817ad32e21f62ce727ff51f9b045a0b1f0d4504eae7a99905df96c1e Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.225734 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l74ff"] Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.816963 5002 generic.go:334] "Generic (PLEG): container finished" podID="14775129-525c-4d0d-9ba5-d28b6066c8ba" containerID="0204d16e81fa37d57bd9c7894f53f94c199ac49185dc0d5ed9f88b13e9b27294" exitCode=0 Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.817076 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjcrp" event={"ID":"14775129-525c-4d0d-9ba5-d28b6066c8ba","Type":"ContainerDied","Data":"0204d16e81fa37d57bd9c7894f53f94c199ac49185dc0d5ed9f88b13e9b27294"} Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.823145 5002 generic.go:334] "Generic (PLEG): container finished" podID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerID="b052a386315c19bb6da7a52e8a48f786d44e73e8618e2cb6746a56b22b855306" exitCode=0 Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.823232 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l48r" event={"ID":"f1403248-15f1-4aa0-afba-bc2e29f01886","Type":"ContainerDied","Data":"b052a386315c19bb6da7a52e8a48f786d44e73e8618e2cb6746a56b22b855306"} Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.823271 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-6l48r" event={"ID":"f1403248-15f1-4aa0-afba-bc2e29f01886","Type":"ContainerStarted","Data":"68559925817ad32e21f62ce727ff51f9b045a0b1f0d4504eae7a99905df96c1e"} Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.828683 5002 generic.go:334] "Generic (PLEG): container finished" podID="9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad" containerID="8c0a6424a60cb3e71049ae55aafe444df562385bc64dc61cc606fdafca8f5e83" exitCode=0 Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.828800 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l74ff" event={"ID":"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad","Type":"ContainerDied","Data":"8c0a6424a60cb3e71049ae55aafe444df562385bc64dc61cc606fdafca8f5e83"} Dec 03 16:38:02 crc kubenswrapper[5002]: I1203 16:38:02.828845 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l74ff" event={"ID":"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad","Type":"ContainerStarted","Data":"dbfc77b51c2b3294cc169f62f006b0c66f2022f6306baf2bd11409c636b3bfb9"} Dec 03 16:38:03 crc kubenswrapper[5002]: I1203 16:38:03.838033 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vjcrp" event={"ID":"14775129-525c-4d0d-9ba5-d28b6066c8ba","Type":"ContainerStarted","Data":"9e37217d6bdd3c54ad15f5ce4d075438230d9e70a3e4e5435d5b943f26cfb6cb"} Dec 03 16:38:03 crc kubenswrapper[5002]: I1203 16:38:03.840870 5002 generic.go:334] "Generic (PLEG): container finished" podID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerID="1d120120334ca64b3558bdca57f8b734fbced968e168a42766d3703c4b36955c" exitCode=0 Dec 03 16:38:03 crc kubenswrapper[5002]: I1203 16:38:03.840974 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l48r" event={"ID":"f1403248-15f1-4aa0-afba-bc2e29f01886","Type":"ContainerDied","Data":"1d120120334ca64b3558bdca57f8b734fbced968e168a42766d3703c4b36955c"} Dec 03 16:38:03 crc kubenswrapper[5002]: I1203 16:38:03.844690 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l74ff" event={"ID":"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad","Type":"ContainerStarted","Data":"4a04f321c35b05bd107e33c41c50efed267c1864c22aec3bd6e1d6546f78e24a"} Dec 03 16:38:03 crc kubenswrapper[5002]: I1203 16:38:03.864346 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vjcrp" podStartSLOduration=2.436152181 podStartE2EDuration="4.864324891s" podCreationTimestamp="2025-12-03 16:37:59 +0000 UTC" firstStartedPulling="2025-12-03 16:38:00.794262527 +0000 UTC m=+404.208084415" lastFinishedPulling="2025-12-03 16:38:03.222435247 +0000 UTC m=+406.636257125" observedRunningTime="2025-12-03 16:38:03.86060266 +0000 UTC m=+407.274424548" watchObservedRunningTime="2025-12-03 16:38:03.864324891 +0000 UTC m=+407.278146779" Dec 03 16:38:04 crc kubenswrapper[5002]: I1203 16:38:04.851773 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l48r" event={"ID":"f1403248-15f1-4aa0-afba-bc2e29f01886","Type":"ContainerStarted","Data":"718625b2b8cd5a16d0d3cdfd83ada3f4f6fa7907a7191c343966de829fc3ca53"} Dec 03 16:38:04 crc kubenswrapper[5002]: I1203 16:38:04.854526 5002 generic.go:334] "Generic (PLEG): container finished" podID="9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad" containerID="4a04f321c35b05bd107e33c41c50efed267c1864c22aec3bd6e1d6546f78e24a" exitCode=0 Dec 03 16:38:04 
crc kubenswrapper[5002]: I1203 16:38:04.854633 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l74ff" event={"ID":"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad","Type":"ContainerDied","Data":"4a04f321c35b05bd107e33c41c50efed267c1864c22aec3bd6e1d6546f78e24a"} Dec 03 16:38:04 crc kubenswrapper[5002]: I1203 16:38:04.879517 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6l48r" podStartSLOduration=2.455199931 podStartE2EDuration="3.879490685s" podCreationTimestamp="2025-12-03 16:38:01 +0000 UTC" firstStartedPulling="2025-12-03 16:38:02.824773551 +0000 UTC m=+406.238595439" lastFinishedPulling="2025-12-03 16:38:04.249064305 +0000 UTC m=+407.662886193" observedRunningTime="2025-12-03 16:38:04.875072965 +0000 UTC m=+408.288894853" watchObservedRunningTime="2025-12-03 16:38:04.879490685 +0000 UTC m=+408.293312583" Dec 03 16:38:06 crc kubenswrapper[5002]: I1203 16:38:06.870510 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l74ff" event={"ID":"9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad","Type":"ContainerStarted","Data":"6633c40c32b7bdbdc2c94d1bbe9f180effa4b00250a8c214990653c4cc56cf04"} Dec 03 16:38:06 crc kubenswrapper[5002]: I1203 16:38:06.900974 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l74ff" podStartSLOduration=3.500258391 podStartE2EDuration="5.900945922s" podCreationTimestamp="2025-12-03 16:38:01 +0000 UTC" firstStartedPulling="2025-12-03 16:38:02.831181885 +0000 UTC m=+406.245003773" lastFinishedPulling="2025-12-03 16:38:05.231869416 +0000 UTC m=+408.645691304" observedRunningTime="2025-12-03 16:38:06.897450226 +0000 UTC m=+410.311272124" watchObservedRunningTime="2025-12-03 16:38:06.900945922 +0000 UTC m=+410.314767820" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.187513 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.187584 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.233249 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.385926 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.385991 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.433957 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.783831 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" podUID="56badc5f-4e9d-4129-855f-4c2a54eb63d8" containerName="registry" containerID="cri-o://8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178" gracePeriod=30 Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.935620 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-vjcrp" Dec 03 16:38:09 crc kubenswrapper[5002]: I1203 16:38:09.936681 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nd4c5" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.237973 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.307733 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.307888 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-certificates\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.307934 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-tls\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.307969 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56badc5f-4e9d-4129-855f-4c2a54eb63d8-installation-pull-secrets\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.308005 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmkrv\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-kube-api-access-mmkrv\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.308078 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-trusted-ca\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.308133 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56badc5f-4e9d-4129-855f-4c2a54eb63d8-ca-trust-extracted\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.308172 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-bound-sa-token\") pod \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\" (UID: \"56badc5f-4e9d-4129-855f-4c2a54eb63d8\") " Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.310159 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.310620 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.320844 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56badc5f-4e9d-4129-855f-4c2a54eb63d8-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.321262 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.324100 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.325029 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-kube-api-access-mmkrv" (OuterVolumeSpecName: "kube-api-access-mmkrv") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "kube-api-access-mmkrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.327461 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.329128 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56badc5f-4e9d-4129-855f-4c2a54eb63d8-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "56badc5f-4e9d-4129-855f-4c2a54eb63d8" (UID: "56badc5f-4e9d-4129-855f-4c2a54eb63d8"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410141 5002 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410197 5002 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410209 5002 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56badc5f-4e9d-4129-855f-4c2a54eb63d8-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410220 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmkrv\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-kube-api-access-mmkrv\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410231 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56badc5f-4e9d-4129-855f-4c2a54eb63d8-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410240 5002 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56badc5f-4e9d-4129-855f-4c2a54eb63d8-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.410251 5002 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56badc5f-4e9d-4129-855f-4c2a54eb63d8-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.898374 5002 generic.go:334] "Generic (PLEG): container finished" podID="56badc5f-4e9d-4129-855f-4c2a54eb63d8" containerID="8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178" exitCode=0 Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.898492 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.898508 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" event={"ID":"56badc5f-4e9d-4129-855f-4c2a54eb63d8","Type":"ContainerDied","Data":"8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178"} Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.898583 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6qpdx" event={"ID":"56badc5f-4e9d-4129-855f-4c2a54eb63d8","Type":"ContainerDied","Data":"7d144f941c74168a20d6d792f9ad026a786f7a14e3bdbc5faed1f974b3766022"} Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.898606 5002 scope.go:117] "RemoveContainer" containerID="8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.920658 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6qpdx"] Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.924439 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6qpdx"] Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.925630 5002 scope.go:117] "RemoveContainer" containerID="8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178" Dec 03 16:38:10 crc kubenswrapper[5002]: E1203 16:38:10.926328 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178\": container with ID starting with 8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178 not found: ID does not exist" containerID="8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178" Dec 03 16:38:10 crc kubenswrapper[5002]: I1203 16:38:10.926372 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178"} err="failed to get container status \"8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178\": rpc error: code = NotFound desc = could not find container \"8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178\": container with ID starting with 8a19eed46ffe536e8e3e389df509862720fb487382e46d0608b1cc35804d9178 not found: ID does not exist" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.603328 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.603392 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.648156 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.774046 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.774258 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.824738 5002 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.949595 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:38:11 crc kubenswrapper[5002]: I1203 16:38:11.950063 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l74ff" Dec 03 16:38:12 crc kubenswrapper[5002]: I1203 16:38:12.847557 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56badc5f-4e9d-4129-855f-4c2a54eb63d8" path="/var/lib/kubelet/pods/56badc5f-4e9d-4129-855f-4c2a54eb63d8/volumes" Dec 03 16:38:20 crc kubenswrapper[5002]: I1203 16:38:20.917148 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:38:20 crc kubenswrapper[5002]: I1203 16:38:20.917802 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:38:20 crc kubenswrapper[5002]: I1203 16:38:20.917890 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:38:20 crc kubenswrapper[5002]: I1203 16:38:20.918718 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1bbdabe3a3b817c3eac3b26805d605bfaebd6c3b6c1598575ca596c5e99f6ed6"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:38:20 crc kubenswrapper[5002]: I1203 16:38:20.918831 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://1bbdabe3a3b817c3eac3b26805d605bfaebd6c3b6c1598575ca596c5e99f6ed6" gracePeriod=600 Dec 03 16:38:21 crc kubenswrapper[5002]: I1203 16:38:21.965680 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="1bbdabe3a3b817c3eac3b26805d605bfaebd6c3b6c1598575ca596c5e99f6ed6" exitCode=0 Dec 03 16:38:21 crc kubenswrapper[5002]: I1203 16:38:21.965864 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"1bbdabe3a3b817c3eac3b26805d605bfaebd6c3b6c1598575ca596c5e99f6ed6"} Dec 03 16:38:21 crc kubenswrapper[5002]: I1203 16:38:21.966030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"3dde95e504f4ee4688555761e9d2854799e5d45cf99d6e5bd6e341cb98b55ff4"} Dec 03 16:38:21 crc kubenswrapper[5002]: I1203 16:38:21.966054 5002 scope.go:117] 
"RemoveContainer" containerID="d35df9eafc14ed95f66af8acad3170ccc11f5da49a1725d8779b230ba8694cf5" Dec 03 16:40:50 crc kubenswrapper[5002]: I1203 16:40:50.916841 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:40:50 crc kubenswrapper[5002]: I1203 16:40:50.917427 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:41:20 crc kubenswrapper[5002]: I1203 16:41:20.917476 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:41:20 crc kubenswrapper[5002]: I1203 16:41:20.918600 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:41:50 crc kubenswrapper[5002]: I1203 16:41:50.916840 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:41:50 crc kubenswrapper[5002]: I1203 16:41:50.917456 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:41:50 crc kubenswrapper[5002]: I1203 16:41:50.917520 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:41:50 crc kubenswrapper[5002]: I1203 16:41:50.918263 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3dde95e504f4ee4688555761e9d2854799e5d45cf99d6e5bd6e341cb98b55ff4"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:41:50 crc kubenswrapper[5002]: I1203 16:41:50.918358 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://3dde95e504f4ee4688555761e9d2854799e5d45cf99d6e5bd6e341cb98b55ff4" gracePeriod=600 Dec 03 16:41:51 crc kubenswrapper[5002]: I1203 16:41:51.471280 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" 
containerID="3dde95e504f4ee4688555761e9d2854799e5d45cf99d6e5bd6e341cb98b55ff4" exitCode=0 Dec 03 16:41:51 crc kubenswrapper[5002]: I1203 16:41:51.471368 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"3dde95e504f4ee4688555761e9d2854799e5d45cf99d6e5bd6e341cb98b55ff4"} Dec 03 16:41:51 crc kubenswrapper[5002]: I1203 16:41:51.471660 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"a989eba6e883743beeaf62d8ab7a87b75096a5c8d56e61d5262eda90b8b04b66"} Dec 03 16:41:51 crc kubenswrapper[5002]: I1203 16:41:51.471712 5002 scope.go:117] "RemoveContainer" containerID="1bbdabe3a3b817c3eac3b26805d605bfaebd6c3b6c1598575ca596c5e99f6ed6" Dec 03 16:43:35 crc kubenswrapper[5002]: I1203 16:43:35.590377 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Liveness probe status=failure output="Get \"http://localhost:1936/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 16:43:35 crc kubenswrapper[5002]: I1203 16:43:35.591087 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:43:35 crc kubenswrapper[5002]: I1203 16:43:35.590541 5002 patch_prober.go:28] interesting pod/router-default-5444994796-wtrm8 container/router namespace/openshift-ingress: Readiness probe status=failure output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 16:43:35 crc kubenswrapper[5002]: I1203 16:43:35.591222 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-ingress/router-default-5444994796-wtrm8" podUID="1a6cf65d-9ddb-4d3b-af08-73cc5e10d5fe" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:43:58 crc kubenswrapper[5002]: I1203 16:43:58.107327 5002 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 16:44:20 crc kubenswrapper[5002]: I1203 16:44:20.917844 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:44:20 crc kubenswrapper[5002]: I1203 16:44:20.921575 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.878134 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4szh5"] Dec 03 
16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.879289 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-controller" containerID="cri-o://de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.879989 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-acl-logging" containerID="cri-o://0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.880065 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="northd" containerID="cri-o://7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.880177 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="sbdb" containerID="cri-o://b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.880297 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-node" containerID="cri-o://d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.880350 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="nbdb" containerID="cri-o://675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.880692 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd" gracePeriod=30 Dec 03 16:44:35 crc kubenswrapper[5002]: I1203 16:44:35.920695 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" containerID="cri-o://6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529" gracePeriod=30 Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.184623 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/3.log" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.187790 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovn-acl-logging/0.log" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.188347 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovn-controller/0.log" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.188970 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250249 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jdknp"] Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250544 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250558 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250568 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-acl-logging" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250577 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-acl-logging" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250588 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56badc5f-4e9d-4129-855f-4c2a54eb63d8" containerName="registry" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250596 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="56badc5f-4e9d-4129-855f-4c2a54eb63d8" containerName="registry" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250605 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="northd" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250611 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="northd" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250619 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="sbdb" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250624 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="sbdb" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250635 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250642 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250655 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250662 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250670 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kubecfg-setup" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250676 5002 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kubecfg-setup" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250683 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250690 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250701 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="nbdb" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250707 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="nbdb" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250716 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250723 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.250734 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-node" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250764 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-node" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250862 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="sbdb" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250877 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-acl-logging" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250888 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-node" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250897 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250904 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="56badc5f-4e9d-4129-855f-4c2a54eb63d8" containerName="registry" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250911 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250918 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="northd" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250926 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250934 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 
16:44:36.250940 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovn-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250950 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="nbdb" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.250957 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.251053 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.251059 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.251154 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: E1203 16:44:36.251243 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.251251 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerName="ovnkube-controller" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.254183 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311630 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-bin\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311703 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-script-lib\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311759 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-kubelet\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311818 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-ovn-kubernetes\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311811 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). 
InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-config\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311916 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311923 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311978 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-slash" (OuterVolumeSpecName: "host-slash") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.311955 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-slash\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312078 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pxr9\" (UniqueName: \"kubernetes.io/projected/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-kube-api-access-4pxr9\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312109 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-etc-openvswitch\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312135 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-env-overrides\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312159 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-systemd\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") " Dec 
03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312188 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-systemd-units\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312206 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-log-socket\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312206 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312250 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-node-log" (OuterVolumeSpecName: "node-log") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312230 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-node-log\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312274 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312294 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-log-socket" (OuterVolumeSpecName: "log-socket") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312312 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-var-lib-openvswitch\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312352 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-var-lib-cni-networks-ovn-kubernetes\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312413 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-ovn\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312411 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312437 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovn-node-metrics-cert\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312456 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-netns\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312466 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312476 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-openvswitch\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312499 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312503 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-netd\") pod \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\" (UID: \"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01\") "
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312521 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312545 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312566 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312606 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312792 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312794 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-env-overrides\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312897 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-systemd-units\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312922 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-var-lib-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-ovn\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312978 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-systemd\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.312995 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-run-netns\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313018 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-slash\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313045 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89m64\" (UniqueName: \"kubernetes.io/projected/fb318592-8d9d-4223-9be0-54ee54692248-kube-api-access-89m64\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313072 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-run-ovn-kubernetes\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-ovnkube-config\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313116 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-log-socket\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313138 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-ovnkube-script-lib\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313177 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-kubelet\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-node-log\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313238 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-cni-netd\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313445 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313495 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fb318592-8d9d-4223-9be0-54ee54692248-ovn-node-metrics-cert\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313614 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-cni-bin\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-etc-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313856 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.313989 5002 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314005 5002 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314018 5002 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-systemd-units\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314029 5002 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-log-socket\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314042 5002 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-node-log\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314052 5002 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314066 5002 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314078 5002 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314089 5002 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-netns\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314100 5002 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314110 5002 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-netd\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314120 5002 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-cni-bin\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314130 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314140 5002 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-kubelet\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314152 5002 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314162 5002 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.314173 5002 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-host-slash\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.320121 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-kube-api-access-4pxr9" (OuterVolumeSpecName: "kube-api-access-4pxr9") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "kube-api-access-4pxr9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.320207 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.329344 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" (UID: "fc13f2ec-2d1e-4432-9f8d-82079a9dfe01"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416136 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-node-log\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416800 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-cni-netd\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416843 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fb318592-8d9d-4223-9be0-54ee54692248-ovn-node-metrics-cert\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416410 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-node-log\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-cni-bin\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416927 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-cni-bin\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.416971 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-etc-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417012 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417028 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-cni-netd\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417094 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-etc-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417072 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417049 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417178 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-env-overrides\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417258 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-systemd-units\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417285 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-var-lib-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417319 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-ovn\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417344 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-run-netns\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417365 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-systemd\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417387 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-slash\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417410 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89m64\" (UniqueName: \"kubernetes.io/projected/fb318592-8d9d-4223-9be0-54ee54692248-kube-api-access-89m64\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417437 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-run-ovn-kubernetes\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417474 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-ovnkube-config\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417516 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-log-socket\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417540 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-ovnkube-script-lib\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417570 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-kubelet\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417633 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pxr9\" (UniqueName: \"kubernetes.io/projected/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-kube-api-access-4pxr9\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417648 5002 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-run-systemd\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417661 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417701 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-kubelet\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417732 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-systemd-units\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417778 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-var-lib-openvswitch\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417807 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-ovn\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417837 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-run-netns\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417869 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-run-systemd\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417881 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-env-overrides\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417899 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-slash\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.417938 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-log-socket\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.418005 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fb318592-8d9d-4223-9be0-54ee54692248-host-run-ovn-kubernetes\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.418584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-ovnkube-script-lib\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.419229 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fb318592-8d9d-4223-9be0-54ee54692248-ovnkube-config\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.422039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fb318592-8d9d-4223-9be0-54ee54692248-ovn-node-metrics-cert\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.436902 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89m64\" (UniqueName: \"kubernetes.io/projected/fb318592-8d9d-4223-9be0-54ee54692248-kube-api-access-89m64\") pod \"ovnkube-node-jdknp\" (UID: \"fb318592-8d9d-4223-9be0-54ee54692248\") " pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.571702 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.943261 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/2.log"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.943927 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/1.log"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.943978 5002 generic.go:334] "Generic (PLEG): container finished" podID="2de485fd-67c0-4be7-abb1-92509ea373da" containerID="cd3179f37f5c9234d0dd5300e3b24e2e121394d7137b95d57682b75d5d266c1b" exitCode=2
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.944356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerDied","Data":"cd3179f37f5c9234d0dd5300e3b24e2e121394d7137b95d57682b75d5d266c1b"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.944450 5002 scope.go:117] "RemoveContainer" containerID="46d73cb93f4e004b38542b0972ec1eab4e61b4698c14cef11eae5b5aeb61c233"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.945026 5002 scope.go:117] "RemoveContainer" containerID="cd3179f37f5c9234d0dd5300e3b24e2e121394d7137b95d57682b75d5d266c1b"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.947319 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovnkube-controller/3.log"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.951353 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovn-acl-logging/0.log"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.952694 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4szh5_fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/ovn-controller/0.log"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953427 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953451 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953458 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953467 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953474 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953484 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953492 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f" exitCode=143
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953498 5002 generic.go:334] "Generic (PLEG): container finished" podID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c" exitCode=143
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953540 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953575 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953587 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953615 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953627 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953637 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953643 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953648 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953653 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953659 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953663 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953724 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953734 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953758 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953767 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953777 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953783 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953789 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953795 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953800 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953805 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953811 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953817 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953823 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953828 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953835 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953843 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953849 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953855 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953861 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953867 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953873 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953879 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953884 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953889 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953895 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953902 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5" event={"ID":"fc13f2ec-2d1e-4432-9f8d-82079a9dfe01","Type":"ContainerDied","Data":"b7de1f69a6d71ed023463e83211b9db79010da03f9e2419ceb36470b10309fe5"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953910 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953918 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953925 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953930 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953936 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953941 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953947 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953952 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953957 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.953962 5002 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.954069 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4szh5"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.956643 5002 generic.go:334] "Generic (PLEG): container finished" podID="fb318592-8d9d-4223-9be0-54ee54692248" containerID="3e5c0d75974e879c3383c36a3567c013ab6934bcd726753494980c3d95f469fa" exitCode=0
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.956678 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerDied","Data":"3e5c0d75974e879c3383c36a3567c013ab6934bcd726753494980c3d95f469fa"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.956700 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"aae7bc75bc9be4963dd4e6df0483c19688b51cf835f559fe18f56e473c0dbb78"}
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.974057 5002 scope.go:117] "RemoveContainer" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.988929 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4szh5"]
Dec 03 16:44:36 crc kubenswrapper[5002]: I1203 16:44:36.992187 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4szh5"]
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:36.999944 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.043788 5002 scope.go:117] "RemoveContainer" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.088001 5002 scope.go:117] "RemoveContainer" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.116092 5002 scope.go:117] "RemoveContainer" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.130878 5002 scope.go:117] "RemoveContainer" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.148214 5002 scope.go:117] "RemoveContainer" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.174467 5002 scope.go:117] "RemoveContainer" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.207857 5002 scope.go:117] "RemoveContainer" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.223026 5002 scope.go:117] "RemoveContainer" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.247464 5002 scope.go:117] "RemoveContainer" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.248705 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": container with ID starting with 6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529 not found: ID does not exist" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.248759 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"} err="failed to get container status \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": rpc error: code = NotFound desc = could not find container \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": container with ID starting with 6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529 not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.248787 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.249168 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": container with ID starting with 4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b not found: ID does not exist" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.249198 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"} err="failed to get container status \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": rpc error: code = NotFound desc = could not find container \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": container with ID starting with 4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.249212 5002 scope.go:117] "RemoveContainer" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.249521 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": container with ID starting with b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c not found: ID does not exist" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.249564 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"} err="failed to get container status \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": rpc error: code = NotFound desc = could not find container \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": container with ID starting with b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.249580 5002 scope.go:117] "RemoveContainer" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.249978 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": container with ID starting with 675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af not found: ID does not exist" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.250000 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"} err="failed to get container status \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": rpc error: code = NotFound desc = could not find container \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": container with ID starting with 675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.250018 5002 scope.go:117] "RemoveContainer" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.250340 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": container with ID starting with 7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb not found: ID does not exist" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.250362 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"} err="failed to get container status \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": rpc error: code = NotFound desc = could not find container \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": container with ID starting with 7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.250377 5002 scope.go:117] "RemoveContainer" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.250974 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": container with ID starting with 73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd not found: ID does not exist" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.250993 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"} err="failed to get container status \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": rpc error: code = NotFound desc = could not find container \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": container with ID starting with 73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.251014 5002 scope.go:117] "RemoveContainer" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.251394 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": container with ID starting with d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137 not found: ID does not exist" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.251414 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"} err="failed to get container status \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": rpc error: code = NotFound desc = could not find container \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": container with ID starting with d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137 not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.251426 5002 scope.go:117] "RemoveContainer" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.251828 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": container with ID starting with 0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f not found: ID does not exist" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.251850 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"} err="failed to get container status \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": rpc error: code = NotFound desc = could not find container \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": container with ID starting with 0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f not found: ID does not exist"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.251878 5002 scope.go:117] "RemoveContainer" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"
Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.252081 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": container with ID starting with de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c not found: ID does not exist" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"
Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.252106 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"} err="failed to get container status \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": rpc error: code = NotFound desc = could not find container \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": container with ID starting with
de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.252122 5002 scope.go:117] "RemoveContainer" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4" Dec 03 16:44:37 crc kubenswrapper[5002]: E1203 16:44:37.252428 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": container with ID starting with e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4 not found: ID does not exist" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.252451 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"} err="failed to get container status \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": rpc error: code = NotFound desc = could not find container \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": container with ID starting with e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.252464 5002 scope.go:117] "RemoveContainer" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.252786 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"} err="failed to get container status \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": rpc error: code = NotFound desc = could not find container \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": container with ID starting with 6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.252805 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.253105 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"} err="failed to get container status \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": rpc error: code = NotFound desc = could not find container \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": container with ID starting with 4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.253122 5002 scope.go:117] "RemoveContainer" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.253389 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"} err="failed to get container status \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": rpc error: code = NotFound desc = could not find container \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": container with ID starting with 
b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.253416 5002 scope.go:117] "RemoveContainer" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.253828 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"} err="failed to get container status \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": rpc error: code = NotFound desc = could not find container \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": container with ID starting with 675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.253853 5002 scope.go:117] "RemoveContainer" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.254117 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"} err="failed to get container status \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": rpc error: code = NotFound desc = could not find container \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": container with ID starting with 7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.254137 5002 scope.go:117] "RemoveContainer" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.254391 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"} err="failed to get container status \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": rpc error: code = NotFound desc = could not find container \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": container with ID starting with 73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.254409 5002 scope.go:117] "RemoveContainer" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.254642 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"} err="failed to get container status \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": rpc error: code = NotFound desc = could not find container \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": container with ID starting with d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.254659 5002 scope.go:117] "RemoveContainer" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.257512 5002 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"} err="failed to get container status \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": rpc error: code = NotFound desc = could not find container \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": container with ID starting with 0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.257563 5002 scope.go:117] "RemoveContainer" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.258077 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"} err="failed to get container status \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": rpc error: code = NotFound desc = could not find container \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": container with ID starting with de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.258104 5002 scope.go:117] "RemoveContainer" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.258371 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"} err="failed to get container status \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": rpc error: code = NotFound desc = could not find container \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": container with ID starting with e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.258390 5002 scope.go:117] "RemoveContainer" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.258764 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"} err="failed to get container status \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": rpc error: code = NotFound desc = could not find container \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": container with ID starting with 6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.258786 5002 scope.go:117] "RemoveContainer" containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.259079 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"} err="failed to get container status \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": rpc error: code = NotFound desc = could not find container \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": container with ID starting with 4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b not found: ID does not exist" Dec 
03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.259098 5002 scope.go:117] "RemoveContainer" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.259614 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"} err="failed to get container status \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": rpc error: code = NotFound desc = could not find container \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": container with ID starting with b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.259634 5002 scope.go:117] "RemoveContainer" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.259961 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"} err="failed to get container status \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": rpc error: code = NotFound desc = could not find container \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": container with ID starting with 675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.259979 5002 scope.go:117] "RemoveContainer" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.260298 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"} err="failed to get container status \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": rpc error: code = NotFound desc = could not find container \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": container with ID starting with 7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.260316 5002 scope.go:117] "RemoveContainer" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.260526 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"} err="failed to get container status \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": rpc error: code = NotFound desc = could not find container \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": container with ID starting with 73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.260545 5002 scope.go:117] "RemoveContainer" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.260799 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"} err="failed to get container status 
\"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": rpc error: code = NotFound desc = could not find container \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": container with ID starting with d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.260821 5002 scope.go:117] "RemoveContainer" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261086 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"} err="failed to get container status \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": rpc error: code = NotFound desc = could not find container \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": container with ID starting with 0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261114 5002 scope.go:117] "RemoveContainer" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261364 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"} err="failed to get container status \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": rpc error: code = NotFound desc = could not find container \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": container with ID starting with de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261384 5002 scope.go:117] "RemoveContainer" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261604 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"} err="failed to get container status \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": rpc error: code = NotFound desc = could not find container \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": container with ID starting with e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261624 5002 scope.go:117] "RemoveContainer" containerID="6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261835 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529"} err="failed to get container status \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": rpc error: code = NotFound desc = could not find container \"6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529\": container with ID starting with 6e95c6a0610cf6ad4d4fc40eff8fa618355c0df3ed4548edc041955902d9e529 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.261853 5002 scope.go:117] "RemoveContainer" 
containerID="4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262032 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b"} err="failed to get container status \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": rpc error: code = NotFound desc = could not find container \"4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b\": container with ID starting with 4fdacd0d8a142479e4d47667aadc849c5193ee8555c43185b18666e0b518da8b not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262058 5002 scope.go:117] "RemoveContainer" containerID="b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262247 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c"} err="failed to get container status \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": rpc error: code = NotFound desc = could not find container \"b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c\": container with ID starting with b7700962aff0d72e43692f7b3698e216077d9e534c00dca14c6b794ccaae9c3c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262265 5002 scope.go:117] "RemoveContainer" containerID="675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262448 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af"} err="failed to get container status \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": rpc error: code = NotFound desc = could not find container \"675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af\": container with ID starting with 675cc2fa68bc8bf4032d1d22d9705300d744c6bf419775579e2ef6ab8d0868af not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262468 5002 scope.go:117] "RemoveContainer" containerID="7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262635 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb"} err="failed to get container status \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": rpc error: code = NotFound desc = could not find container \"7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb\": container with ID starting with 7aebc3982c95e437808fba965b0dff3bcdc270d1131f71a8d15fa5d7e02a48eb not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262653 5002 scope.go:117] "RemoveContainer" containerID="73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262890 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd"} err="failed to get container status \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": rpc error: code = NotFound desc = could not find 
container \"73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd\": container with ID starting with 73c2318d84f589a86c8760832cb69fd85a2c191bfd5a75d6d717db0f76dc1ffd not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.262907 5002 scope.go:117] "RemoveContainer" containerID="d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.263187 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137"} err="failed to get container status \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": rpc error: code = NotFound desc = could not find container \"d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137\": container with ID starting with d001de71d4b749b58c92f66d23d4b0c79f8374fba607c2dc1089be5468995137 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.263227 5002 scope.go:117] "RemoveContainer" containerID="0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.263507 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f"} err="failed to get container status \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": rpc error: code = NotFound desc = could not find container \"0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f\": container with ID starting with 0f41edac26178b88388f3aaad49e808e728705fc541bf180e00c721950dfe68f not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.263582 5002 scope.go:117] "RemoveContainer" containerID="de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.263950 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c"} err="failed to get container status \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": rpc error: code = NotFound desc = could not find container \"de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c\": container with ID starting with de833df97676b18492f91a6d83404e18b064cc4979fcbbb252453ec4f1ef336c not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.263969 5002 scope.go:117] "RemoveContainer" containerID="e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.264167 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4"} err="failed to get container status \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": rpc error: code = NotFound desc = could not find container \"e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4\": container with ID starting with e4904106b11e0b25419333b1effda00b14de26645ce48e734357625abb948df4 not found: ID does not exist" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.968005 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" 
event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"6fb40edad5fe01be37a29b231c2ca5cf3c2bfe3ddca6595d2c1e5ec387e7105b"} Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.968403 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"390a4293ff3916af7789519ce985aa0285887a0cf3dbaa71edb8c431695bed29"} Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.968418 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"efc5e4445579c7ab2665521a7bf85da1f646c94a933cc41b45ea806090f3df6f"} Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.968428 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"7668a13ea7c5195f87010472f423094f9e9552e2a42691cc3f9a8354e675e603"} Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.968438 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"4a136aa7d2e0feeee8db9d373bd9eada63ea317ab452eaef1521746ba7e194f1"} Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.968447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"67f1c92609d180383a2466222bcc5cbcb9d4b4e040a6944e9d781fd8fc194e47"} Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.970156 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gjxps_2de485fd-67c0-4be7-abb1-92509ea373da/kube-multus/2.log" Dec 03 16:44:37 crc kubenswrapper[5002]: I1203 16:44:37.970200 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gjxps" event={"ID":"2de485fd-67c0-4be7-abb1-92509ea373da","Type":"ContainerStarted","Data":"e80761f472b220ca067c07e0d0580b05e0373892787f9c1bde9aaf363fe0c408"} Dec 03 16:44:38 crc kubenswrapper[5002]: I1203 16:44:38.847409 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc13f2ec-2d1e-4432-9f8d-82079a9dfe01" path="/var/lib/kubelet/pods/fc13f2ec-2d1e-4432-9f8d-82079a9dfe01/volumes" Dec 03 16:44:39 crc kubenswrapper[5002]: I1203 16:44:39.985087 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"8d7c1eda2698ddf4655def7d0cd94bf11516b96a8d177e9aedb831b091b09f98"} Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.197380 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-85ktn"] Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.198911 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.201813 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.201866 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.201899 5002 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-r9hql" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.202658 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.290650 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htqt4\" (UniqueName: \"kubernetes.io/projected/f99802d3-48de-4371-8d44-468b33627dfa-kube-api-access-htqt4\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.290730 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f99802d3-48de-4371-8d44-468b33627dfa-node-mnt\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.290932 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f99802d3-48de-4371-8d44-468b33627dfa-crc-storage\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.392673 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f99802d3-48de-4371-8d44-468b33627dfa-crc-storage\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.392780 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htqt4\" (UniqueName: \"kubernetes.io/projected/f99802d3-48de-4371-8d44-468b33627dfa-kube-api-access-htqt4\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.392815 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f99802d3-48de-4371-8d44-468b33627dfa-node-mnt\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.393081 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f99802d3-48de-4371-8d44-468b33627dfa-node-mnt\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.393421 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f99802d3-48de-4371-8d44-468b33627dfa-crc-storage\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.412346 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htqt4\" (UniqueName: \"kubernetes.io/projected/f99802d3-48de-4371-8d44-468b33627dfa-kube-api-access-htqt4\") pod \"crc-storage-crc-85ktn\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.518401 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: E1203 16:44:42.546123 5002 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3c94f5525efef804b7518aa38bd02e3e80d17c87e06790c59a086df75198084b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:44:42 crc kubenswrapper[5002]: E1203 16:44:42.546215 5002 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3c94f5525efef804b7518aa38bd02e3e80d17c87e06790c59a086df75198084b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: E1203 16:44:42.546244 5002 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3c94f5525efef804b7518aa38bd02e3e80d17c87e06790c59a086df75198084b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:42 crc kubenswrapper[5002]: E1203 16:44:42.546298 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-85ktn_crc-storage(f99802d3-48de-4371-8d44-468b33627dfa)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-85ktn_crc-storage(f99802d3-48de-4371-8d44-468b33627dfa)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3c94f5525efef804b7518aa38bd02e3e80d17c87e06790c59a086df75198084b): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-85ktn" podUID="f99802d3-48de-4371-8d44-468b33627dfa" Dec 03 16:44:42 crc kubenswrapper[5002]: I1203 16:44:42.968886 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-85ktn"] Dec 03 16:44:43 crc kubenswrapper[5002]: I1203 16:44:43.004183 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" event={"ID":"fb318592-8d9d-4223-9be0-54ee54692248","Type":"ContainerStarted","Data":"9f7a73a541ac94fd5a5507719461857aa2d9fc53c5a512e6f1f0c4aa2c46f2c3"} Dec 03 16:44:43 crc kubenswrapper[5002]: I1203 16:44:43.004205 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:43 crc kubenswrapper[5002]: I1203 16:44:43.004496 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:44:43 crc kubenswrapper[5002]: I1203 16:44:43.005127 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:43 crc kubenswrapper[5002]: E1203 16:44:43.029468 5002 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3db5574b7aa9328216c501c86463e852d1f825da1bef8ca7ce88ba467810ff90): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:44:43 crc kubenswrapper[5002]: E1203 16:44:43.029556 5002 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3db5574b7aa9328216c501c86463e852d1f825da1bef8ca7ce88ba467810ff90): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:43 crc kubenswrapper[5002]: E1203 16:44:43.029585 5002 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3db5574b7aa9328216c501c86463e852d1f825da1bef8ca7ce88ba467810ff90): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:43 crc kubenswrapper[5002]: E1203 16:44:43.029646 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-85ktn_crc-storage(f99802d3-48de-4371-8d44-468b33627dfa)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-85ktn_crc-storage(f99802d3-48de-4371-8d44-468b33627dfa)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-85ktn_crc-storage_f99802d3-48de-4371-8d44-468b33627dfa_0(3db5574b7aa9328216c501c86463e852d1f825da1bef8ca7ce88ba467810ff90): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-85ktn" podUID="f99802d3-48de-4371-8d44-468b33627dfa" Dec 03 16:44:43 crc kubenswrapper[5002]: I1203 16:44:43.039440 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" podStartSLOduration=7.039420048 podStartE2EDuration="7.039420048s" podCreationTimestamp="2025-12-03 16:44:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:43.035865361 +0000 UTC m=+806.449687269" watchObservedRunningTime="2025-12-03 16:44:43.039420048 +0000 UTC m=+806.453241946" Dec 03 16:44:43 crc kubenswrapper[5002]: I1203 16:44:43.043990 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:44:44 crc kubenswrapper[5002]: I1203 16:44:44.009903 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:44:44 crc kubenswrapper[5002]: I1203 16:44:44.010310 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:44:44 crc kubenswrapper[5002]: I1203 16:44:44.034759 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:44:50 crc kubenswrapper[5002]: I1203 16:44:50.916712 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:44:50 crc kubenswrapper[5002]: I1203 16:44:50.917154 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:44:54 crc kubenswrapper[5002]: I1203 16:44:54.840212 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:54 crc kubenswrapper[5002]: I1203 16:44:54.841441 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:55 crc kubenswrapper[5002]: I1203 16:44:55.101611 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-85ktn"] Dec 03 16:44:55 crc kubenswrapper[5002]: I1203 16:44:55.115349 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:44:56 crc kubenswrapper[5002]: I1203 16:44:56.110228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-85ktn" event={"ID":"f99802d3-48de-4371-8d44-468b33627dfa","Type":"ContainerStarted","Data":"88332cddedbcd6d25604fcf7ea72dd2d1e72fcaaf5e687268aa0f09ee2e2da71"} Dec 03 16:44:58 crc kubenswrapper[5002]: I1203 16:44:58.126022 5002 generic.go:334] "Generic (PLEG): container finished" podID="f99802d3-48de-4371-8d44-468b33627dfa" containerID="19e728c93e9ceb38a9c99bec1252830ce0792306044661663cda3346f5acc98f" exitCode=0 Dec 03 16:44:58 crc kubenswrapper[5002]: I1203 16:44:58.126140 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-85ktn" event={"ID":"f99802d3-48de-4371-8d44-468b33627dfa","Type":"ContainerDied","Data":"19e728c93e9ceb38a9c99bec1252830ce0792306044661663cda3346f5acc98f"} Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.396650 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.552947 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f99802d3-48de-4371-8d44-468b33627dfa-crc-storage\") pod \"f99802d3-48de-4371-8d44-468b33627dfa\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.553069 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f99802d3-48de-4371-8d44-468b33627dfa-node-mnt\") pod \"f99802d3-48de-4371-8d44-468b33627dfa\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.553111 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htqt4\" (UniqueName: \"kubernetes.io/projected/f99802d3-48de-4371-8d44-468b33627dfa-kube-api-access-htqt4\") pod \"f99802d3-48de-4371-8d44-468b33627dfa\" (UID: \"f99802d3-48de-4371-8d44-468b33627dfa\") " Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.553216 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f99802d3-48de-4371-8d44-468b33627dfa-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "f99802d3-48de-4371-8d44-468b33627dfa" (UID: "f99802d3-48de-4371-8d44-468b33627dfa"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.554141 5002 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f99802d3-48de-4371-8d44-468b33627dfa-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.562546 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f99802d3-48de-4371-8d44-468b33627dfa-kube-api-access-htqt4" (OuterVolumeSpecName: "kube-api-access-htqt4") pod "f99802d3-48de-4371-8d44-468b33627dfa" (UID: "f99802d3-48de-4371-8d44-468b33627dfa"). 
InnerVolumeSpecName "kube-api-access-htqt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.579134 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f99802d3-48de-4371-8d44-468b33627dfa-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "f99802d3-48de-4371-8d44-468b33627dfa" (UID: "f99802d3-48de-4371-8d44-468b33627dfa"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.656100 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htqt4\" (UniqueName: \"kubernetes.io/projected/f99802d3-48de-4371-8d44-468b33627dfa-kube-api-access-htqt4\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:59 crc kubenswrapper[5002]: I1203 16:44:59.656193 5002 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f99802d3-48de-4371-8d44-468b33627dfa-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.143676 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-85ktn" event={"ID":"f99802d3-48de-4371-8d44-468b33627dfa","Type":"ContainerDied","Data":"88332cddedbcd6d25604fcf7ea72dd2d1e72fcaaf5e687268aa0f09ee2e2da71"} Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.143723 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88332cddedbcd6d25604fcf7ea72dd2d1e72fcaaf5e687268aa0f09ee2e2da71" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.143815 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-85ktn" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.177228 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs"] Dec 03 16:45:00 crc kubenswrapper[5002]: E1203 16:45:00.178201 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f99802d3-48de-4371-8d44-468b33627dfa" containerName="storage" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.178230 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f99802d3-48de-4371-8d44-468b33627dfa" containerName="storage" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.178860 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f99802d3-48de-4371-8d44-468b33627dfa" containerName="storage" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.179721 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.193910 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.199600 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.209227 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs"] Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.264838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/002bdda7-9280-4517-a570-2f2d4f1d1dab-config-volume\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.265145 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/002bdda7-9280-4517-a570-2f2d4f1d1dab-secret-volume\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.265266 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plgc4\" (UniqueName: \"kubernetes.io/projected/002bdda7-9280-4517-a570-2f2d4f1d1dab-kube-api-access-plgc4\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.366369 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/002bdda7-9280-4517-a570-2f2d4f1d1dab-config-volume\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.366446 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/002bdda7-9280-4517-a570-2f2d4f1d1dab-secret-volume\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.366464 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plgc4\" (UniqueName: \"kubernetes.io/projected/002bdda7-9280-4517-a570-2f2d4f1d1dab-kube-api-access-plgc4\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.368094 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/002bdda7-9280-4517-a570-2f2d4f1d1dab-config-volume\") pod 
\"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.375604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/002bdda7-9280-4517-a570-2f2d4f1d1dab-secret-volume\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.400046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plgc4\" (UniqueName: \"kubernetes.io/projected/002bdda7-9280-4517-a570-2f2d4f1d1dab-kube-api-access-plgc4\") pod \"collect-profiles-29413005-jmfzs\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.505243 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:00 crc kubenswrapper[5002]: I1203 16:45:00.712018 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs"] Dec 03 16:45:00 crc kubenswrapper[5002]: W1203 16:45:00.723959 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod002bdda7_9280_4517_a570_2f2d4f1d1dab.slice/crio-165d40c7860d46f323e2879dea22751ab5e5abecbaa52c911fbbb1940b45a7b9 WatchSource:0}: Error finding container 165d40c7860d46f323e2879dea22751ab5e5abecbaa52c911fbbb1940b45a7b9: Status 404 returned error can't find the container with id 165d40c7860d46f323e2879dea22751ab5e5abecbaa52c911fbbb1940b45a7b9 Dec 03 16:45:01 crc kubenswrapper[5002]: I1203 16:45:01.153156 5002 generic.go:334] "Generic (PLEG): container finished" podID="002bdda7-9280-4517-a570-2f2d4f1d1dab" containerID="8dd70c8987394a9ec519d147d9609ce75933d4316bd207ebe443d4b203a7a48e" exitCode=0 Dec 03 16:45:01 crc kubenswrapper[5002]: I1203 16:45:01.153233 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" event={"ID":"002bdda7-9280-4517-a570-2f2d4f1d1dab","Type":"ContainerDied","Data":"8dd70c8987394a9ec519d147d9609ce75933d4316bd207ebe443d4b203a7a48e"} Dec 03 16:45:01 crc kubenswrapper[5002]: I1203 16:45:01.153272 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" event={"ID":"002bdda7-9280-4517-a570-2f2d4f1d1dab","Type":"ContainerStarted","Data":"165d40c7860d46f323e2879dea22751ab5e5abecbaa52c911fbbb1940b45a7b9"} Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.396649 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.499092 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plgc4\" (UniqueName: \"kubernetes.io/projected/002bdda7-9280-4517-a570-2f2d4f1d1dab-kube-api-access-plgc4\") pod \"002bdda7-9280-4517-a570-2f2d4f1d1dab\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.499293 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/002bdda7-9280-4517-a570-2f2d4f1d1dab-secret-volume\") pod \"002bdda7-9280-4517-a570-2f2d4f1d1dab\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.499323 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/002bdda7-9280-4517-a570-2f2d4f1d1dab-config-volume\") pod \"002bdda7-9280-4517-a570-2f2d4f1d1dab\" (UID: \"002bdda7-9280-4517-a570-2f2d4f1d1dab\") " Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.500286 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/002bdda7-9280-4517-a570-2f2d4f1d1dab-config-volume" (OuterVolumeSpecName: "config-volume") pod "002bdda7-9280-4517-a570-2f2d4f1d1dab" (UID: "002bdda7-9280-4517-a570-2f2d4f1d1dab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.506960 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/002bdda7-9280-4517-a570-2f2d4f1d1dab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "002bdda7-9280-4517-a570-2f2d4f1d1dab" (UID: "002bdda7-9280-4517-a570-2f2d4f1d1dab"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.506981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/002bdda7-9280-4517-a570-2f2d4f1d1dab-kube-api-access-plgc4" (OuterVolumeSpecName: "kube-api-access-plgc4") pod "002bdda7-9280-4517-a570-2f2d4f1d1dab" (UID: "002bdda7-9280-4517-a570-2f2d4f1d1dab"). InnerVolumeSpecName "kube-api-access-plgc4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.600955 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plgc4\" (UniqueName: \"kubernetes.io/projected/002bdda7-9280-4517-a570-2f2d4f1d1dab-kube-api-access-plgc4\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.600997 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/002bdda7-9280-4517-a570-2f2d4f1d1dab-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:02 crc kubenswrapper[5002]: I1203 16:45:02.601008 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/002bdda7-9280-4517-a570-2f2d4f1d1dab-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:03 crc kubenswrapper[5002]: I1203 16:45:03.170285 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" event={"ID":"002bdda7-9280-4517-a570-2f2d4f1d1dab","Type":"ContainerDied","Data":"165d40c7860d46f323e2879dea22751ab5e5abecbaa52c911fbbb1940b45a7b9"} Dec 03 16:45:03 crc kubenswrapper[5002]: I1203 16:45:03.170340 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="165d40c7860d46f323e2879dea22751ab5e5abecbaa52c911fbbb1940b45a7b9" Dec 03 16:45:03 crc kubenswrapper[5002]: I1203 16:45:03.170405 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs" Dec 03 16:45:06 crc kubenswrapper[5002]: I1203 16:45:06.597990 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jdknp" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.286041 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh"] Dec 03 16:45:07 crc kubenswrapper[5002]: E1203 16:45:07.286429 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="002bdda7-9280-4517-a570-2f2d4f1d1dab" containerName="collect-profiles" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.286986 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="002bdda7-9280-4517-a570-2f2d4f1d1dab" containerName="collect-profiles" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.287132 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="002bdda7-9280-4517-a570-2f2d4f1d1dab" containerName="collect-profiles" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.288251 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.291047 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.301077 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh"] Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.363732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.363842 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8hzx\" (UniqueName: \"kubernetes.io/projected/5e70649f-5753-43d7-8641-572e9ab62148-kube-api-access-c8hzx\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.363972 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.465477 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.465790 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8hzx\" (UniqueName: \"kubernetes.io/projected/5e70649f-5753-43d7-8641-572e9ab62148-kube-api-access-c8hzx\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.465925 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.466528 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.466574 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.492131 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8hzx\" (UniqueName: \"kubernetes.io/projected/5e70649f-5753-43d7-8641-572e9ab62148-kube-api-access-c8hzx\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.612952 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:07 crc kubenswrapper[5002]: I1203 16:45:07.892866 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh"] Dec 03 16:45:07 crc kubenswrapper[5002]: W1203 16:45:07.905069 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e70649f_5753_43d7_8641_572e9ab62148.slice/crio-a1f2f868ec787a5fd4180643f20c700c0e0f563df29bcab35a14b1494bc77439 WatchSource:0}: Error finding container a1f2f868ec787a5fd4180643f20c700c0e0f563df29bcab35a14b1494bc77439: Status 404 returned error can't find the container with id a1f2f868ec787a5fd4180643f20c700c0e0f563df29bcab35a14b1494bc77439 Dec 03 16:45:08 crc kubenswrapper[5002]: I1203 16:45:08.201505 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" event={"ID":"5e70649f-5753-43d7-8641-572e9ab62148","Type":"ContainerStarted","Data":"c82f0e1c047fa0e50afe71b39c6f9f84459f919ca5b56420c0edd04540664957"} Dec 03 16:45:08 crc kubenswrapper[5002]: I1203 16:45:08.201558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" event={"ID":"5e70649f-5753-43d7-8641-572e9ab62148","Type":"ContainerStarted","Data":"a1f2f868ec787a5fd4180643f20c700c0e0f563df29bcab35a14b1494bc77439"} Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.227137 5002 generic.go:334] "Generic (PLEG): container finished" podID="5e70649f-5753-43d7-8641-572e9ab62148" containerID="c82f0e1c047fa0e50afe71b39c6f9f84459f919ca5b56420c0edd04540664957" exitCode=0 Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.227381 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" event={"ID":"5e70649f-5753-43d7-8641-572e9ab62148","Type":"ContainerDied","Data":"c82f0e1c047fa0e50afe71b39c6f9f84459f919ca5b56420c0edd04540664957"} Dec 03 16:45:09 crc 
kubenswrapper[5002]: I1203 16:45:09.482966 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zl48q"] Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.485104 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.500179 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zl48q"] Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.598650 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntcvh\" (UniqueName: \"kubernetes.io/projected/84f6062f-c4a6-4417-96b6-f184e52fa5bc-kube-api-access-ntcvh\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.598711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-catalog-content\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.598768 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-utilities\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.700857 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-utilities\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.701372 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntcvh\" (UniqueName: \"kubernetes.io/projected/84f6062f-c4a6-4417-96b6-f184e52fa5bc-kube-api-access-ntcvh\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.701492 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-catalog-content\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.702238 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-utilities\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.702421 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-catalog-content\") pod 
\"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.728932 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntcvh\" (UniqueName: \"kubernetes.io/projected/84f6062f-c4a6-4417-96b6-f184e52fa5bc-kube-api-access-ntcvh\") pod \"redhat-operators-zl48q\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:09 crc kubenswrapper[5002]: I1203 16:45:09.809287 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:10 crc kubenswrapper[5002]: I1203 16:45:10.016040 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zl48q"] Dec 03 16:45:10 crc kubenswrapper[5002]: I1203 16:45:10.236876 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerStarted","Data":"1a92776f066a3655c141ee4e4b280ee425cf0922d626bcf87d9950e5bc33691c"} Dec 03 16:45:10 crc kubenswrapper[5002]: I1203 16:45:10.236933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerStarted","Data":"f396853788df202816847420a3b80b3aa65c680c07e36176d7dc50a5c47a57c0"} Dec 03 16:45:11 crc kubenswrapper[5002]: I1203 16:45:11.245128 5002 generic.go:334] "Generic (PLEG): container finished" podID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerID="1a92776f066a3655c141ee4e4b280ee425cf0922d626bcf87d9950e5bc33691c" exitCode=0 Dec 03 16:45:11 crc kubenswrapper[5002]: I1203 16:45:11.245258 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerDied","Data":"1a92776f066a3655c141ee4e4b280ee425cf0922d626bcf87d9950e5bc33691c"} Dec 03 16:45:11 crc kubenswrapper[5002]: I1203 16:45:11.248367 5002 generic.go:334] "Generic (PLEG): container finished" podID="5e70649f-5753-43d7-8641-572e9ab62148" containerID="cd0cedc77c4b3ecb18f99fee85131b9849a8a80ea21bba4055178887e25e4f64" exitCode=0 Dec 03 16:45:11 crc kubenswrapper[5002]: I1203 16:45:11.248404 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" event={"ID":"5e70649f-5753-43d7-8641-572e9ab62148","Type":"ContainerDied","Data":"cd0cedc77c4b3ecb18f99fee85131b9849a8a80ea21bba4055178887e25e4f64"} Dec 03 16:45:12 crc kubenswrapper[5002]: I1203 16:45:12.257463 5002 generic.go:334] "Generic (PLEG): container finished" podID="5e70649f-5753-43d7-8641-572e9ab62148" containerID="58940a1bde42dcbb8a806b55ac441b49a882d30ce6734ffa9a6fe8043cbd72d4" exitCode=0 Dec 03 16:45:12 crc kubenswrapper[5002]: I1203 16:45:12.257503 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" event={"ID":"5e70649f-5753-43d7-8641-572e9ab62148","Type":"ContainerDied","Data":"58940a1bde42dcbb8a806b55ac441b49a882d30ce6734ffa9a6fe8043cbd72d4"} Dec 03 16:45:12 crc kubenswrapper[5002]: I1203 16:45:12.260497 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" 
event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerStarted","Data":"1f37d32430944be13b59c07dab92fbd77de727123e567de0fd0ae039c1baedfb"} Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.272045 5002 generic.go:334] "Generic (PLEG): container finished" podID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerID="1f37d32430944be13b59c07dab92fbd77de727123e567de0fd0ae039c1baedfb" exitCode=0 Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.272151 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerDied","Data":"1f37d32430944be13b59c07dab92fbd77de727123e567de0fd0ae039c1baedfb"} Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.535627 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.661479 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8hzx\" (UniqueName: \"kubernetes.io/projected/5e70649f-5753-43d7-8641-572e9ab62148-kube-api-access-c8hzx\") pod \"5e70649f-5753-43d7-8641-572e9ab62148\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.661589 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-util\") pod \"5e70649f-5753-43d7-8641-572e9ab62148\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.661684 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-bundle\") pod \"5e70649f-5753-43d7-8641-572e9ab62148\" (UID: \"5e70649f-5753-43d7-8641-572e9ab62148\") " Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.663018 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-bundle" (OuterVolumeSpecName: "bundle") pod "5e70649f-5753-43d7-8641-572e9ab62148" (UID: "5e70649f-5753-43d7-8641-572e9ab62148"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.672132 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e70649f-5753-43d7-8641-572e9ab62148-kube-api-access-c8hzx" (OuterVolumeSpecName: "kube-api-access-c8hzx") pod "5e70649f-5753-43d7-8641-572e9ab62148" (UID: "5e70649f-5753-43d7-8641-572e9ab62148"). InnerVolumeSpecName "kube-api-access-c8hzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.685119 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-util" (OuterVolumeSpecName: "util") pod "5e70649f-5753-43d7-8641-572e9ab62148" (UID: "5e70649f-5753-43d7-8641-572e9ab62148"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.763573 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.763619 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8hzx\" (UniqueName: \"kubernetes.io/projected/5e70649f-5753-43d7-8641-572e9ab62148-kube-api-access-c8hzx\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:13 crc kubenswrapper[5002]: I1203 16:45:13.763634 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e70649f-5753-43d7-8641-572e9ab62148-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:14 crc kubenswrapper[5002]: I1203 16:45:14.293504 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" event={"ID":"5e70649f-5753-43d7-8641-572e9ab62148","Type":"ContainerDied","Data":"a1f2f868ec787a5fd4180643f20c700c0e0f563df29bcab35a14b1494bc77439"} Dec 03 16:45:14 crc kubenswrapper[5002]: I1203 16:45:14.293582 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1f2f868ec787a5fd4180643f20c700c0e0f563df29bcab35a14b1494bc77439" Dec 03 16:45:14 crc kubenswrapper[5002]: I1203 16:45:14.293714 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh" Dec 03 16:45:15 crc kubenswrapper[5002]: I1203 16:45:15.303571 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerStarted","Data":"47c69c7f8c32a0d9dab06d129f5a1d5d100c6e80f0fb4d2a3ecdabd5615f022c"} Dec 03 16:45:15 crc kubenswrapper[5002]: I1203 16:45:15.329326 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zl48q" podStartSLOduration=3.272142936 podStartE2EDuration="6.329300123s" podCreationTimestamp="2025-12-03 16:45:09 +0000 UTC" firstStartedPulling="2025-12-03 16:45:11.248010296 +0000 UTC m=+834.661832174" lastFinishedPulling="2025-12-03 16:45:14.305167423 +0000 UTC m=+837.718989361" observedRunningTime="2025-12-03 16:45:15.326483396 +0000 UTC m=+838.740305334" watchObservedRunningTime="2025-12-03 16:45:15.329300123 +0000 UTC m=+838.743122001" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.615470 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g"] Dec 03 16:45:17 crc kubenswrapper[5002]: E1203 16:45:17.616014 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="util" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.616026 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="util" Dec 03 16:45:17 crc kubenswrapper[5002]: E1203 16:45:17.616038 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="extract" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.616045 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="extract" Dec 03 16:45:17 crc 
kubenswrapper[5002]: E1203 16:45:17.616058 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="pull" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.616064 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="pull" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.616172 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e70649f-5753-43d7-8641-572e9ab62148" containerName="extract" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.616590 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.618740 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.618909 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-t89gv" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.624600 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.629683 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g"] Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.721838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb8r9\" (UniqueName: \"kubernetes.io/projected/0c279a3b-ffa8-4136-9932-f483512bbb7c-kube-api-access-hb8r9\") pod \"nmstate-operator-5b5b58f5c8-9hq4g\" (UID: \"0c279a3b-ffa8-4136-9932-f483512bbb7c\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.824053 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb8r9\" (UniqueName: \"kubernetes.io/projected/0c279a3b-ffa8-4136-9932-f483512bbb7c-kube-api-access-hb8r9\") pod \"nmstate-operator-5b5b58f5c8-9hq4g\" (UID: \"0c279a3b-ffa8-4136-9932-f483512bbb7c\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.843625 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb8r9\" (UniqueName: \"kubernetes.io/projected/0c279a3b-ffa8-4136-9932-f483512bbb7c-kube-api-access-hb8r9\") pod \"nmstate-operator-5b5b58f5c8-9hq4g\" (UID: \"0c279a3b-ffa8-4136-9932-f483512bbb7c\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" Dec 03 16:45:17 crc kubenswrapper[5002]: I1203 16:45:17.968117 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" Dec 03 16:45:18 crc kubenswrapper[5002]: I1203 16:45:18.237982 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g"] Dec 03 16:45:18 crc kubenswrapper[5002]: W1203 16:45:18.259346 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c279a3b_ffa8_4136_9932_f483512bbb7c.slice/crio-0e2778b8f05610ebff166af431cf19705e33c612419a879f39a2aea2f4d8e2d8 WatchSource:0}: Error finding container 0e2778b8f05610ebff166af431cf19705e33c612419a879f39a2aea2f4d8e2d8: Status 404 returned error can't find the container with id 0e2778b8f05610ebff166af431cf19705e33c612419a879f39a2aea2f4d8e2d8 Dec 03 16:45:18 crc kubenswrapper[5002]: I1203 16:45:18.321877 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" event={"ID":"0c279a3b-ffa8-4136-9932-f483512bbb7c","Type":"ContainerStarted","Data":"0e2778b8f05610ebff166af431cf19705e33c612419a879f39a2aea2f4d8e2d8"} Dec 03 16:45:19 crc kubenswrapper[5002]: I1203 16:45:19.810555 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:19 crc kubenswrapper[5002]: I1203 16:45:19.810975 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:20 crc kubenswrapper[5002]: I1203 16:45:20.848885 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zl48q" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="registry-server" probeResult="failure" output=< Dec 03 16:45:20 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Dec 03 16:45:20 crc kubenswrapper[5002]: > Dec 03 16:45:20 crc kubenswrapper[5002]: I1203 16:45:20.917062 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:45:20 crc kubenswrapper[5002]: I1203 16:45:20.917155 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:45:20 crc kubenswrapper[5002]: I1203 16:45:20.917233 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:45:20 crc kubenswrapper[5002]: I1203 16:45:20.918479 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a989eba6e883743beeaf62d8ab7a87b75096a5c8d56e61d5262eda90b8b04b66"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:45:20 crc kubenswrapper[5002]: I1203 16:45:20.918571 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" 
podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://a989eba6e883743beeaf62d8ab7a87b75096a5c8d56e61d5262eda90b8b04b66" gracePeriod=600 Dec 03 16:45:21 crc kubenswrapper[5002]: I1203 16:45:21.343654 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="a989eba6e883743beeaf62d8ab7a87b75096a5c8d56e61d5262eda90b8b04b66" exitCode=0 Dec 03 16:45:21 crc kubenswrapper[5002]: I1203 16:45:21.343724 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"a989eba6e883743beeaf62d8ab7a87b75096a5c8d56e61d5262eda90b8b04b66"} Dec 03 16:45:21 crc kubenswrapper[5002]: I1203 16:45:21.343789 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"73ac542ac5ae95737fc5bd0085cb65082e08deae3560c2f23506ea5bddf84026"} Dec 03 16:45:21 crc kubenswrapper[5002]: I1203 16:45:21.343808 5002 scope.go:117] "RemoveContainer" containerID="3dde95e504f4ee4688555761e9d2854799e5d45cf99d6e5bd6e341cb98b55ff4" Dec 03 16:45:21 crc kubenswrapper[5002]: I1203 16:45:21.345975 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" event={"ID":"0c279a3b-ffa8-4136-9932-f483512bbb7c","Type":"ContainerStarted","Data":"8cd982496fde468f0722178e4a8f79ffebbcb0f3a66bf8d2b7e6d43a1034e7a9"} Dec 03 16:45:21 crc kubenswrapper[5002]: I1203 16:45:21.385386 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-9hq4g" podStartSLOduration=1.760345311 podStartE2EDuration="4.385357039s" podCreationTimestamp="2025-12-03 16:45:17 +0000 UTC" firstStartedPulling="2025-12-03 16:45:18.267465922 +0000 UTC m=+841.681287810" lastFinishedPulling="2025-12-03 16:45:20.89247765 +0000 UTC m=+844.306299538" observedRunningTime="2025-12-03 16:45:21.384934547 +0000 UTC m=+844.798756525" watchObservedRunningTime="2025-12-03 16:45:21.385357039 +0000 UTC m=+844.799178947" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.624680 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.626567 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.641075 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.642236 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.644525 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.645962 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.646314 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-mdrkw" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.660435 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-rd64m"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.661345 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.693258 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.778727 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-dbus-socket\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.778799 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrbrs\" (UniqueName: \"kubernetes.io/projected/b952bb42-1df0-4837-9c7e-ab25b7949f89-kube-api-access-nrbrs\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.778845 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-ovs-socket\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.778898 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b289\" (UniqueName: \"kubernetes.io/projected/a3a0627d-e103-43bc-a5d6-0933fc925543-kube-api-access-9b289\") pod \"nmstate-metrics-7f946cbc9-w2kzn\" (UID: \"a3a0627d-e103-43bc-a5d6-0933fc925543\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.778941 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55e48020-7db5-4f57-8c21-0dec9e03ef5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: \"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.778964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmsbq\" (UniqueName: \"kubernetes.io/projected/55e48020-7db5-4f57-8c21-0dec9e03ef5c-kube-api-access-lmsbq\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: 
\"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.779004 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-nmstate-lock\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.833790 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.834551 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.850898 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.851084 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-f6k2q" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.853670 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.862500 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z"] Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.880826 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b289\" (UniqueName: \"kubernetes.io/projected/a3a0627d-e103-43bc-a5d6-0933fc925543-kube-api-access-9b289\") pod \"nmstate-metrics-7f946cbc9-w2kzn\" (UID: \"a3a0627d-e103-43bc-a5d6-0933fc925543\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.880875 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55e48020-7db5-4f57-8c21-0dec9e03ef5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: \"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.880896 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmsbq\" (UniqueName: \"kubernetes.io/projected/55e48020-7db5-4f57-8c21-0dec9e03ef5c-kube-api-access-lmsbq\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: \"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.880924 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-nmstate-lock\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.880969 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-dbus-socket\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " 
pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.880985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrbrs\" (UniqueName: \"kubernetes.io/projected/b952bb42-1df0-4837-9c7e-ab25b7949f89-kube-api-access-nrbrs\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.881005 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-ovs-socket\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.881092 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-ovs-socket\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: E1203 16:45:27.881523 5002 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 03 16:45:27 crc kubenswrapper[5002]: E1203 16:45:27.881585 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55e48020-7db5-4f57-8c21-0dec9e03ef5c-tls-key-pair podName:55e48020-7db5-4f57-8c21-0dec9e03ef5c nodeName:}" failed. No retries permitted until 2025-12-03 16:45:28.381560672 +0000 UTC m=+851.795382560 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/55e48020-7db5-4f57-8c21-0dec9e03ef5c-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-tv2qn" (UID: "55e48020-7db5-4f57-8c21-0dec9e03ef5c") : secret "openshift-nmstate-webhook" not found Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.881881 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-nmstate-lock\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.882231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b952bb42-1df0-4837-9c7e-ab25b7949f89-dbus-socket\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.902232 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrbrs\" (UniqueName: \"kubernetes.io/projected/b952bb42-1df0-4837-9c7e-ab25b7949f89-kube-api-access-nrbrs\") pod \"nmstate-handler-rd64m\" (UID: \"b952bb42-1df0-4837-9c7e-ab25b7949f89\") " pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.905325 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b289\" (UniqueName: \"kubernetes.io/projected/a3a0627d-e103-43bc-a5d6-0933fc925543-kube-api-access-9b289\") pod \"nmstate-metrics-7f946cbc9-w2kzn\" (UID: \"a3a0627d-e103-43bc-a5d6-0933fc925543\") " 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.905929 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmsbq\" (UniqueName: \"kubernetes.io/projected/55e48020-7db5-4f57-8c21-0dec9e03ef5c-kube-api-access-lmsbq\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: \"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.955451 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.982315 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c0d4c8e-606d-4a79-85da-56d503115dde-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.982785 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6c0d4c8e-606d-4a79-85da-56d503115dde-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.982848 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8sdt\" (UniqueName: \"kubernetes.io/projected/6c0d4c8e-606d-4a79-85da-56d503115dde-kube-api-access-g8sdt\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:27 crc kubenswrapper[5002]: I1203 16:45:27.993222 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.049950 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5b9b959497-c77rb"] Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.050712 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.073314 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b9b959497-c77rb"] Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.084137 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c0d4c8e-606d-4a79-85da-56d503115dde-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.084189 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6c0d4c8e-606d-4a79-85da-56d503115dde-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.084250 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8sdt\" (UniqueName: \"kubernetes.io/projected/6c0d4c8e-606d-4a79-85da-56d503115dde-kube-api-access-g8sdt\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: E1203 16:45:28.084330 5002 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 03 16:45:28 crc kubenswrapper[5002]: E1203 16:45:28.084440 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c0d4c8e-606d-4a79-85da-56d503115dde-plugin-serving-cert podName:6c0d4c8e-606d-4a79-85da-56d503115dde nodeName:}" failed. No retries permitted until 2025-12-03 16:45:28.584407787 +0000 UTC m=+851.998229735 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/6c0d4c8e-606d-4a79-85da-56d503115dde-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-tnw4z" (UID: "6c0d4c8e-606d-4a79-85da-56d503115dde") : secret "plugin-serving-cert" not found Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.085723 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6c0d4c8e-606d-4a79-85da-56d503115dde-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.104136 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8sdt\" (UniqueName: \"kubernetes.io/projected/6c0d4c8e-606d-4a79-85da-56d503115dde-kube-api-access-g8sdt\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186167 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-trusted-ca-bundle\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186211 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-service-ca\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186288 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-console-config\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186308 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33290a32-ca54-43b1-a932-a43cac140549-console-oauth-config\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186331 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-oauth-serving-cert\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186353 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5b2f\" (UniqueName: \"kubernetes.io/projected/33290a32-ca54-43b1-a932-a43cac140549-kube-api-access-r5b2f\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " 
pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.186380 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33290a32-ca54-43b1-a932-a43cac140549-console-serving-cert\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.274575 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn"] Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287415 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-service-ca\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287522 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-console-config\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33290a32-ca54-43b1-a932-a43cac140549-console-oauth-config\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287561 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-oauth-serving-cert\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287584 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5b2f\" (UniqueName: \"kubernetes.io/projected/33290a32-ca54-43b1-a932-a43cac140549-kube-api-access-r5b2f\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287610 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33290a32-ca54-43b1-a932-a43cac140549-console-serving-cert\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.287648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-trusted-ca-bundle\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.289016 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-oauth-serving-cert\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.289104 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-trusted-ca-bundle\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.289709 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-console-config\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.289911 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33290a32-ca54-43b1-a932-a43cac140549-service-ca\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.294534 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33290a32-ca54-43b1-a932-a43cac140549-console-oauth-config\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.295322 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33290a32-ca54-43b1-a932-a43cac140549-console-serving-cert\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.307499 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5b2f\" (UniqueName: \"kubernetes.io/projected/33290a32-ca54-43b1-a932-a43cac140549-kube-api-access-r5b2f\") pod \"console-5b9b959497-c77rb\" (UID: \"33290a32-ca54-43b1-a932-a43cac140549\") " pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.380801 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.388826 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55e48020-7db5-4f57-8c21-0dec9e03ef5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: \"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.393919 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55e48020-7db5-4f57-8c21-0dec9e03ef5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tv2qn\" (UID: \"55e48020-7db5-4f57-8c21-0dec9e03ef5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.401290 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-rd64m" event={"ID":"b952bb42-1df0-4837-9c7e-ab25b7949f89","Type":"ContainerStarted","Data":"3e719800ced8de7b513c27b236bceb52a046c134c7e2562078eaa8fede90cf21"} Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.402824 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" event={"ID":"a3a0627d-e103-43bc-a5d6-0933fc925543","Type":"ContainerStarted","Data":"511b4bba5637638a0b1d51b8373ba529c05e6899f30070a9b0e3f9c3bd8aef54"} Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.584192 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.584585 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b9b959497-c77rb"] Dec 03 16:45:28 crc kubenswrapper[5002]: W1203 16:45:28.590151 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33290a32_ca54_43b1_a932_a43cac140549.slice/crio-bc51a2ffa2c2ca33cd7dc089aa6393e7251d324868fddb0bd7356681ef06798b WatchSource:0}: Error finding container bc51a2ffa2c2ca33cd7dc089aa6393e7251d324868fddb0bd7356681ef06798b: Status 404 returned error can't find the container with id bc51a2ffa2c2ca33cd7dc089aa6393e7251d324868fddb0bd7356681ef06798b Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.592760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c0d4c8e-606d-4a79-85da-56d503115dde-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.597569 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c0d4c8e-606d-4a79-85da-56d503115dde-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tnw4z\" (UID: \"6c0d4c8e-606d-4a79-85da-56d503115dde\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.751306 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.780719 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn"] Dec 03 16:45:28 crc kubenswrapper[5002]: I1203 16:45:28.988896 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z"] Dec 03 16:45:28 crc kubenswrapper[5002]: W1203 16:45:28.995571 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c0d4c8e_606d_4a79_85da_56d503115dde.slice/crio-e3be2488d07ef002a6cf9ae082be0935aa1ae23860ac3998663bd9b971f7fe25 WatchSource:0}: Error finding container e3be2488d07ef002a6cf9ae082be0935aa1ae23860ac3998663bd9b971f7fe25: Status 404 returned error can't find the container with id e3be2488d07ef002a6cf9ae082be0935aa1ae23860ac3998663bd9b971f7fe25 Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.411119 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" event={"ID":"55e48020-7db5-4f57-8c21-0dec9e03ef5c","Type":"ContainerStarted","Data":"17bdb574f965044164165b2d88126eaa47ead63f44ce32218bd8b3b4d8967aca"} Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.413138 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b9b959497-c77rb" event={"ID":"33290a32-ca54-43b1-a932-a43cac140549","Type":"ContainerStarted","Data":"9b4012016c3b07228d6c32be930e0258d524ef31884a8f610596d5975a0a2563"} Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.413208 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b9b959497-c77rb" event={"ID":"33290a32-ca54-43b1-a932-a43cac140549","Type":"ContainerStarted","Data":"bc51a2ffa2c2ca33cd7dc089aa6393e7251d324868fddb0bd7356681ef06798b"} Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.414542 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" event={"ID":"6c0d4c8e-606d-4a79-85da-56d503115dde","Type":"ContainerStarted","Data":"e3be2488d07ef002a6cf9ae082be0935aa1ae23860ac3998663bd9b971f7fe25"} Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.442262 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5b9b959497-c77rb" podStartSLOduration=1.4422392880000001 podStartE2EDuration="1.442239288s" podCreationTimestamp="2025-12-03 16:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:45:29.441268612 +0000 UTC m=+852.855090540" watchObservedRunningTime="2025-12-03 16:45:29.442239288 +0000 UTC m=+852.856061176" Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.856251 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:29 crc kubenswrapper[5002]: I1203 16:45:29.908222 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:30 crc kubenswrapper[5002]: I1203 16:45:30.097601 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zl48q"] Dec 03 16:45:31 crc kubenswrapper[5002]: I1203 16:45:31.427789 5002 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-zl48q" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="registry-server" containerID="cri-o://47c69c7f8c32a0d9dab06d129f5a1d5d100c6e80f0fb4d2a3ecdabd5615f022c" gracePeriod=2 Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.436543 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" event={"ID":"a3a0627d-e103-43bc-a5d6-0933fc925543","Type":"ContainerStarted","Data":"6e3cb1da32d64d2214d733e0a0190e64e1ea8b4fe14b7664372400df4304113c"} Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.440933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" event={"ID":"55e48020-7db5-4f57-8c21-0dec9e03ef5c","Type":"ContainerStarted","Data":"2378061eb49a0018e749172453480ad1b6c5d2e58676ad6df89da616ed386757"} Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.441026 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.445316 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-rd64m" event={"ID":"b952bb42-1df0-4837-9c7e-ab25b7949f89","Type":"ContainerStarted","Data":"f0c3050c3649a89284cc2a3e8b695ca0d693ae18f2799657598d86ac846f12b4"} Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.445496 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.450326 5002 generic.go:334] "Generic (PLEG): container finished" podID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerID="47c69c7f8c32a0d9dab06d129f5a1d5d100c6e80f0fb4d2a3ecdabd5615f022c" exitCode=0 Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.450501 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerDied","Data":"47c69c7f8c32a0d9dab06d129f5a1d5d100c6e80f0fb4d2a3ecdabd5615f022c"} Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.459889 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" podStartSLOduration=3.135056362 podStartE2EDuration="5.459868241s" podCreationTimestamp="2025-12-03 16:45:27 +0000 UTC" firstStartedPulling="2025-12-03 16:45:28.791318006 +0000 UTC m=+852.205139894" lastFinishedPulling="2025-12-03 16:45:31.116129885 +0000 UTC m=+854.529951773" observedRunningTime="2025-12-03 16:45:32.459020498 +0000 UTC m=+855.872842386" watchObservedRunningTime="2025-12-03 16:45:32.459868241 +0000 UTC m=+855.873690139" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.661089 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.680547 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-rd64m" podStartSLOduration=2.6200469010000003 podStartE2EDuration="5.680524512s" podCreationTimestamp="2025-12-03 16:45:27 +0000 UTC" firstStartedPulling="2025-12-03 16:45:28.031583546 +0000 UTC m=+851.445405434" lastFinishedPulling="2025-12-03 16:45:31.092061157 +0000 UTC m=+854.505883045" observedRunningTime="2025-12-03 16:45:32.481493171 +0000 UTC m=+855.895315059" watchObservedRunningTime="2025-12-03 16:45:32.680524512 +0000 UTC m=+856.094346400" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.788352 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-catalog-content\") pod \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.788970 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-utilities\") pod \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.789101 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntcvh\" (UniqueName: \"kubernetes.io/projected/84f6062f-c4a6-4417-96b6-f184e52fa5bc-kube-api-access-ntcvh\") pod \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\" (UID: \"84f6062f-c4a6-4417-96b6-f184e52fa5bc\") " Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.794055 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-utilities" (OuterVolumeSpecName: "utilities") pod "84f6062f-c4a6-4417-96b6-f184e52fa5bc" (UID: "84f6062f-c4a6-4417-96b6-f184e52fa5bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.797349 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84f6062f-c4a6-4417-96b6-f184e52fa5bc-kube-api-access-ntcvh" (OuterVolumeSpecName: "kube-api-access-ntcvh") pod "84f6062f-c4a6-4417-96b6-f184e52fa5bc" (UID: "84f6062f-c4a6-4417-96b6-f184e52fa5bc"). InnerVolumeSpecName "kube-api-access-ntcvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.891822 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.891862 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntcvh\" (UniqueName: \"kubernetes.io/projected/84f6062f-c4a6-4417-96b6-f184e52fa5bc-kube-api-access-ntcvh\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.907020 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84f6062f-c4a6-4417-96b6-f184e52fa5bc" (UID: "84f6062f-c4a6-4417-96b6-f184e52fa5bc"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:32 crc kubenswrapper[5002]: I1203 16:45:32.993775 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84f6062f-c4a6-4417-96b6-f184e52fa5bc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.464077 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zl48q" event={"ID":"84f6062f-c4a6-4417-96b6-f184e52fa5bc","Type":"ContainerDied","Data":"f396853788df202816847420a3b80b3aa65c680c07e36176d7dc50a5c47a57c0"} Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.464125 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zl48q" Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.464159 5002 scope.go:117] "RemoveContainer" containerID="47c69c7f8c32a0d9dab06d129f5a1d5d100c6e80f0fb4d2a3ecdabd5615f022c" Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.468532 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" event={"ID":"6c0d4c8e-606d-4a79-85da-56d503115dde","Type":"ContainerStarted","Data":"f3e8c6fa1ba5a48b066556818ebd105325646ec892cb4116780cfc147e11d403"} Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.489778 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tnw4z" podStartSLOduration=2.8157213089999997 podStartE2EDuration="6.489739363s" podCreationTimestamp="2025-12-03 16:45:27 +0000 UTC" firstStartedPulling="2025-12-03 16:45:28.997909944 +0000 UTC m=+852.411731832" lastFinishedPulling="2025-12-03 16:45:32.671927998 +0000 UTC m=+856.085749886" observedRunningTime="2025-12-03 16:45:33.486417513 +0000 UTC m=+856.900239411" watchObservedRunningTime="2025-12-03 16:45:33.489739363 +0000 UTC m=+856.903561251" Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.501612 5002 scope.go:117] "RemoveContainer" containerID="1f37d32430944be13b59c07dab92fbd77de727123e567de0fd0ae039c1baedfb" Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.523855 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zl48q"] Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.528961 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zl48q"] Dec 03 16:45:33 crc kubenswrapper[5002]: I1203 16:45:33.559290 5002 scope.go:117] "RemoveContainer" containerID="1a92776f066a3655c141ee4e4b280ee425cf0922d626bcf87d9950e5bc33691c" Dec 03 16:45:34 crc kubenswrapper[5002]: I1203 16:45:34.852729 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" path="/var/lib/kubelet/pods/84f6062f-c4a6-4417-96b6-f184e52fa5bc/volumes" Dec 03 16:45:35 crc kubenswrapper[5002]: I1203 16:45:35.491338 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" event={"ID":"a3a0627d-e103-43bc-a5d6-0933fc925543","Type":"ContainerStarted","Data":"939eabb58d75f7e7fdf94ffc1d5d8d0a934553782462e708143707aa642207c0"} Dec 03 16:45:35 crc kubenswrapper[5002]: I1203 16:45:35.519474 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-w2kzn" podStartSLOduration=2.153564171 
podStartE2EDuration="8.519437098s" podCreationTimestamp="2025-12-03 16:45:27 +0000 UTC" firstStartedPulling="2025-12-03 16:45:28.274310549 +0000 UTC m=+851.688132437" lastFinishedPulling="2025-12-03 16:45:34.640183466 +0000 UTC m=+858.054005364" observedRunningTime="2025-12-03 16:45:35.517511565 +0000 UTC m=+858.931333463" watchObservedRunningTime="2025-12-03 16:45:35.519437098 +0000 UTC m=+858.933259026" Dec 03 16:45:38 crc kubenswrapper[5002]: I1203 16:45:38.032774 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-rd64m" Dec 03 16:45:38 crc kubenswrapper[5002]: I1203 16:45:38.382666 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:38 crc kubenswrapper[5002]: I1203 16:45:38.383084 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:38 crc kubenswrapper[5002]: I1203 16:45:38.389118 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:38 crc kubenswrapper[5002]: I1203 16:45:38.514193 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5b9b959497-c77rb" Dec 03 16:45:38 crc kubenswrapper[5002]: I1203 16:45:38.575074 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-68mq2"] Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.201036 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nw8pc"] Dec 03 16:45:44 crc kubenswrapper[5002]: E1203 16:45:44.202139 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="extract-utilities" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.202159 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="extract-utilities" Dec 03 16:45:44 crc kubenswrapper[5002]: E1203 16:45:44.202170 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="extract-content" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.202182 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="extract-content" Dec 03 16:45:44 crc kubenswrapper[5002]: E1203 16:45:44.202206 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="registry-server" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.202213 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="registry-server" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.202338 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="84f6062f-c4a6-4417-96b6-f184e52fa5bc" containerName="registry-server" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.203330 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.213342 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw8pc"] Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.370590 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-catalog-content\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.370917 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwbnl\" (UniqueName: \"kubernetes.io/projected/d83aff31-eac0-4faa-b247-44d881ef56ae-kube-api-access-bwbnl\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.371009 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-utilities\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.472555 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-utilities\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.472998 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-catalog-content\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.473053 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-utilities\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.473103 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwbnl\" (UniqueName: \"kubernetes.io/projected/d83aff31-eac0-4faa-b247-44d881ef56ae-kube-api-access-bwbnl\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.473571 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-catalog-content\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.501033 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bwbnl\" (UniqueName: \"kubernetes.io/projected/d83aff31-eac0-4faa-b247-44d881ef56ae-kube-api-access-bwbnl\") pod \"redhat-marketplace-nw8pc\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:44 crc kubenswrapper[5002]: I1203 16:45:44.539282 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:45 crc kubenswrapper[5002]: I1203 16:45:45.041192 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw8pc"] Dec 03 16:45:45 crc kubenswrapper[5002]: I1203 16:45:45.553427 5002 generic.go:334] "Generic (PLEG): container finished" podID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerID="c2189ce53f848b9aad24b1e91a46757bb132fce9b840647669e232ecb2131d8c" exitCode=0 Dec 03 16:45:45 crc kubenswrapper[5002]: I1203 16:45:45.553567 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw8pc" event={"ID":"d83aff31-eac0-4faa-b247-44d881ef56ae","Type":"ContainerDied","Data":"c2189ce53f848b9aad24b1e91a46757bb132fce9b840647669e232ecb2131d8c"} Dec 03 16:45:45 crc kubenswrapper[5002]: I1203 16:45:45.553898 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw8pc" event={"ID":"d83aff31-eac0-4faa-b247-44d881ef56ae","Type":"ContainerStarted","Data":"63f6fc7e301bad4be314b67b4769ccd674d002f3048673ab33aac7dd2237713c"} Dec 03 16:45:46 crc kubenswrapper[5002]: I1203 16:45:46.560899 5002 generic.go:334] "Generic (PLEG): container finished" podID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerID="0002d091e13bef4b57f933faa434c4b9f6ad00885a9f307b02c8dc2bc1e0a289" exitCode=0 Dec 03 16:45:46 crc kubenswrapper[5002]: I1203 16:45:46.560951 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw8pc" event={"ID":"d83aff31-eac0-4faa-b247-44d881ef56ae","Type":"ContainerDied","Data":"0002d091e13bef4b57f933faa434c4b9f6ad00885a9f307b02c8dc2bc1e0a289"} Dec 03 16:45:47 crc kubenswrapper[5002]: I1203 16:45:47.569124 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw8pc" event={"ID":"d83aff31-eac0-4faa-b247-44d881ef56ae","Type":"ContainerStarted","Data":"216769edd21baf901414053191034596ab9ec132d09b8e30a161d3406c381fac"} Dec 03 16:45:47 crc kubenswrapper[5002]: I1203 16:45:47.598352 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nw8pc" podStartSLOduration=2.175075948 podStartE2EDuration="3.598332315s" podCreationTimestamp="2025-12-03 16:45:44 +0000 UTC" firstStartedPulling="2025-12-03 16:45:45.555323417 +0000 UTC m=+868.969145315" lastFinishedPulling="2025-12-03 16:45:46.978579794 +0000 UTC m=+870.392401682" observedRunningTime="2025-12-03 16:45:47.595964881 +0000 UTC m=+871.009786779" watchObservedRunningTime="2025-12-03 16:45:47.598332315 +0000 UTC m=+871.012154203" Dec 03 16:45:48 crc kubenswrapper[5002]: I1203 16:45:48.591625 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tv2qn" Dec 03 16:45:54 crc kubenswrapper[5002]: I1203 16:45:54.539535 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:54 crc kubenswrapper[5002]: I1203 16:45:54.540387 5002 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:54 crc kubenswrapper[5002]: I1203 16:45:54.583162 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:54 crc kubenswrapper[5002]: I1203 16:45:54.657124 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:54 crc kubenswrapper[5002]: I1203 16:45:54.822907 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw8pc"] Dec 03 16:45:56 crc kubenswrapper[5002]: I1203 16:45:56.624008 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nw8pc" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="registry-server" containerID="cri-o://216769edd21baf901414053191034596ab9ec132d09b8e30a161d3406c381fac" gracePeriod=2 Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.632693 5002 generic.go:334] "Generic (PLEG): container finished" podID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerID="216769edd21baf901414053191034596ab9ec132d09b8e30a161d3406c381fac" exitCode=0 Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.632790 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw8pc" event={"ID":"d83aff31-eac0-4faa-b247-44d881ef56ae","Type":"ContainerDied","Data":"216769edd21baf901414053191034596ab9ec132d09b8e30a161d3406c381fac"} Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.748833 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.898636 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwbnl\" (UniqueName: \"kubernetes.io/projected/d83aff31-eac0-4faa-b247-44d881ef56ae-kube-api-access-bwbnl\") pod \"d83aff31-eac0-4faa-b247-44d881ef56ae\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.898798 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-utilities\") pod \"d83aff31-eac0-4faa-b247-44d881ef56ae\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.898847 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-catalog-content\") pod \"d83aff31-eac0-4faa-b247-44d881ef56ae\" (UID: \"d83aff31-eac0-4faa-b247-44d881ef56ae\") " Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.900207 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-utilities" (OuterVolumeSpecName: "utilities") pod "d83aff31-eac0-4faa-b247-44d881ef56ae" (UID: "d83aff31-eac0-4faa-b247-44d881ef56ae"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.921486 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d83aff31-eac0-4faa-b247-44d881ef56ae-kube-api-access-bwbnl" (OuterVolumeSpecName: "kube-api-access-bwbnl") pod "d83aff31-eac0-4faa-b247-44d881ef56ae" (UID: "d83aff31-eac0-4faa-b247-44d881ef56ae"). InnerVolumeSpecName "kube-api-access-bwbnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:57 crc kubenswrapper[5002]: I1203 16:45:57.935585 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d83aff31-eac0-4faa-b247-44d881ef56ae" (UID: "d83aff31-eac0-4faa-b247-44d881ef56ae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.001351 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.001389 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwbnl\" (UniqueName: \"kubernetes.io/projected/d83aff31-eac0-4faa-b247-44d881ef56ae-kube-api-access-bwbnl\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.001403 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d83aff31-eac0-4faa-b247-44d881ef56ae-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.642037 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw8pc" event={"ID":"d83aff31-eac0-4faa-b247-44d881ef56ae","Type":"ContainerDied","Data":"63f6fc7e301bad4be314b67b4769ccd674d002f3048673ab33aac7dd2237713c"} Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.642119 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw8pc" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.642913 5002 scope.go:117] "RemoveContainer" containerID="216769edd21baf901414053191034596ab9ec132d09b8e30a161d3406c381fac" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.676897 5002 scope.go:117] "RemoveContainer" containerID="0002d091e13bef4b57f933faa434c4b9f6ad00885a9f307b02c8dc2bc1e0a289" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.683770 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw8pc"] Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.689394 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw8pc"] Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.696571 5002 scope.go:117] "RemoveContainer" containerID="c2189ce53f848b9aad24b1e91a46757bb132fce9b840647669e232ecb2131d8c" Dec 03 16:45:58 crc kubenswrapper[5002]: I1203 16:45:58.850568 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" path="/var/lib/kubelet/pods/d83aff31-eac0-4faa-b247-44d881ef56ae/volumes" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.530208 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5"] Dec 03 16:46:01 crc kubenswrapper[5002]: E1203 16:46:01.530965 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="registry-server" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.530977 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="registry-server" Dec 03 16:46:01 crc kubenswrapper[5002]: E1203 16:46:01.530989 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="extract-utilities" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.530995 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="extract-utilities" Dec 03 16:46:01 crc kubenswrapper[5002]: E1203 16:46:01.531012 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="extract-content" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.531019 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="extract-content" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.531124 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d83aff31-eac0-4faa-b247-44d881ef56ae" containerName="registry-server" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.531926 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.534463 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.552405 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvxkf\" (UniqueName: \"kubernetes.io/projected/389ed0b8-f8b8-4949-baf8-83c696910edf-kube-api-access-tvxkf\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.552466 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.552625 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.553811 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5"] Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.653898 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.653970 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvxkf\" (UniqueName: \"kubernetes.io/projected/389ed0b8-f8b8-4949-baf8-83c696910edf-kube-api-access-tvxkf\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.654002 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.654865 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.654881 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.676506 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvxkf\" (UniqueName: \"kubernetes.io/projected/389ed0b8-f8b8-4949-baf8-83c696910edf-kube-api-access-tvxkf\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:01 crc kubenswrapper[5002]: I1203 16:46:01.847137 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:02 crc kubenswrapper[5002]: I1203 16:46:02.328231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5"] Dec 03 16:46:02 crc kubenswrapper[5002]: I1203 16:46:02.670939 5002 generic.go:334] "Generic (PLEG): container finished" podID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerID="c4a43e0080f989bbc1f3ca2f5ffdce77161afa548b695bf62ee79ffd6358108a" exitCode=0 Dec 03 16:46:02 crc kubenswrapper[5002]: I1203 16:46:02.671136 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" event={"ID":"389ed0b8-f8b8-4949-baf8-83c696910edf","Type":"ContainerDied","Data":"c4a43e0080f989bbc1f3ca2f5ffdce77161afa548b695bf62ee79ffd6358108a"} Dec 03 16:46:02 crc kubenswrapper[5002]: I1203 16:46:02.671376 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" event={"ID":"389ed0b8-f8b8-4949-baf8-83c696910edf","Type":"ContainerStarted","Data":"73f568353f79f852b24944cf03208a2b3d1f1b014e3c555bb10663946718f405"} Dec 03 16:46:03 crc kubenswrapper[5002]: I1203 16:46:03.624127 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-68mq2" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerName="console" containerID="cri-o://1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d" gracePeriod=15 Dec 03 16:46:03 crc kubenswrapper[5002]: I1203 16:46:03.962928 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-68mq2_2ff3812c-cb2a-4b07-b140-0f0b97b35e13/console/0.log" Dec 03 16:46:03 crc kubenswrapper[5002]: I1203 16:46:03.963624 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.122827 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-oauth-serving-cert\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123027 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-oauth-config\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123073 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-service-ca\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123172 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-serving-cert\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123212 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-trusted-ca-bundle\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123266 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-config\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123318 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr2jp\" (UniqueName: \"kubernetes.io/projected/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-kube-api-access-sr2jp\") pod \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\" (UID: \"2ff3812c-cb2a-4b07-b140-0f0b97b35e13\") " Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123880 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-service-ca" (OuterVolumeSpecName: "service-ca") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123947 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.123966 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.124601 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-config" (OuterVolumeSpecName: "console-config") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.130102 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.130868 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-kube-api-access-sr2jp" (OuterVolumeSpecName: "kube-api-access-sr2jp") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "kube-api-access-sr2jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.130998 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "2ff3812c-cb2a-4b07-b140-0f0b97b35e13" (UID: "2ff3812c-cb2a-4b07-b140-0f0b97b35e13"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225020 5002 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225127 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr2jp\" (UniqueName: \"kubernetes.io/projected/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-kube-api-access-sr2jp\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225149 5002 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225166 5002 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225191 5002 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225214 5002 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.225237 5002 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ff3812c-cb2a-4b07-b140-0f0b97b35e13-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.697973 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-68mq2_2ff3812c-cb2a-4b07-b140-0f0b97b35e13/console/0.log" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.698051 5002 generic.go:334] "Generic (PLEG): container finished" podID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerID="1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d" exitCode=2 Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.698103 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-68mq2" event={"ID":"2ff3812c-cb2a-4b07-b140-0f0b97b35e13","Type":"ContainerDied","Data":"1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d"} Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.698148 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-68mq2" event={"ID":"2ff3812c-cb2a-4b07-b140-0f0b97b35e13","Type":"ContainerDied","Data":"5f83bd7f04fdd583e1ad96690c27eb6d4efa0f6568bcdbc1348133f3e284ab41"} Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.698198 5002 scope.go:117] "RemoveContainer" containerID="1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.698387 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-68mq2" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.736207 5002 scope.go:117] "RemoveContainer" containerID="1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.736326 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-68mq2"] Dec 03 16:46:04 crc kubenswrapper[5002]: E1203 16:46:04.736817 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d\": container with ID starting with 1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d not found: ID does not exist" containerID="1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.736849 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d"} err="failed to get container status \"1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d\": rpc error: code = NotFound desc = could not find container \"1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d\": container with ID starting with 1909a11ae72c3102b8414fcc753d34d3aae5c9ed6483f5ca8cd3139447a0896d not found: ID does not exist" Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.740890 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-68mq2"] Dec 03 16:46:04 crc kubenswrapper[5002]: I1203 16:46:04.854973 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" path="/var/lib/kubelet/pods/2ff3812c-cb2a-4b07-b140-0f0b97b35e13/volumes" Dec 03 16:46:05 crc kubenswrapper[5002]: I1203 16:46:05.706542 5002 generic.go:334] "Generic (PLEG): container finished" podID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerID="757588decad5a4b4ae7a4ae66fb9dacb1416707fc5017ad42682c088f8707e99" exitCode=0 Dec 03 16:46:05 crc kubenswrapper[5002]: I1203 16:46:05.706618 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" event={"ID":"389ed0b8-f8b8-4949-baf8-83c696910edf","Type":"ContainerDied","Data":"757588decad5a4b4ae7a4ae66fb9dacb1416707fc5017ad42682c088f8707e99"} Dec 03 16:46:06 crc kubenswrapper[5002]: I1203 16:46:06.714920 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" event={"ID":"389ed0b8-f8b8-4949-baf8-83c696910edf","Type":"ContainerDied","Data":"1023cc1d9b818b2303a94c9521c91030b1a516a5bfcf1dd12797f2799041051d"} Dec 03 16:46:06 crc kubenswrapper[5002]: I1203 16:46:06.715203 5002 generic.go:334] "Generic (PLEG): container finished" podID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerID="1023cc1d9b818b2303a94c9521c91030b1a516a5bfcf1dd12797f2799041051d" exitCode=0 Dec 03 16:46:07 crc kubenswrapper[5002]: I1203 16:46:07.951600 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.081337 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-bundle\") pod \"389ed0b8-f8b8-4949-baf8-83c696910edf\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.081397 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-util\") pod \"389ed0b8-f8b8-4949-baf8-83c696910edf\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.081461 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvxkf\" (UniqueName: \"kubernetes.io/projected/389ed0b8-f8b8-4949-baf8-83c696910edf-kube-api-access-tvxkf\") pod \"389ed0b8-f8b8-4949-baf8-83c696910edf\" (UID: \"389ed0b8-f8b8-4949-baf8-83c696910edf\") " Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.082696 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-bundle" (OuterVolumeSpecName: "bundle") pod "389ed0b8-f8b8-4949-baf8-83c696910edf" (UID: "389ed0b8-f8b8-4949-baf8-83c696910edf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.092996 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/389ed0b8-f8b8-4949-baf8-83c696910edf-kube-api-access-tvxkf" (OuterVolumeSpecName: "kube-api-access-tvxkf") pod "389ed0b8-f8b8-4949-baf8-83c696910edf" (UID: "389ed0b8-f8b8-4949-baf8-83c696910edf"). InnerVolumeSpecName "kube-api-access-tvxkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.093805 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-util" (OuterVolumeSpecName: "util") pod "389ed0b8-f8b8-4949-baf8-83c696910edf" (UID: "389ed0b8-f8b8-4949-baf8-83c696910edf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.183387 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvxkf\" (UniqueName: \"kubernetes.io/projected/389ed0b8-f8b8-4949-baf8-83c696910edf-kube-api-access-tvxkf\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.183877 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.183893 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389ed0b8-f8b8-4949-baf8-83c696910edf-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.731900 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" event={"ID":"389ed0b8-f8b8-4949-baf8-83c696910edf","Type":"ContainerDied","Data":"73f568353f79f852b24944cf03208a2b3d1f1b014e3c555bb10663946718f405"} Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.731944 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73f568353f79f852b24944cf03208a2b3d1f1b014e3c555bb10663946718f405" Dec 03 16:46:08 crc kubenswrapper[5002]: I1203 16:46:08.732069 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.676505 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc"] Dec 03 16:46:17 crc kubenswrapper[5002]: E1203 16:46:17.677095 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="pull" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677113 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="pull" Dec 03 16:46:17 crc kubenswrapper[5002]: E1203 16:46:17.677127 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="util" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677134 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="util" Dec 03 16:46:17 crc kubenswrapper[5002]: E1203 16:46:17.677148 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerName="console" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677154 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerName="console" Dec 03 16:46:17 crc kubenswrapper[5002]: E1203 16:46:17.677162 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="extract" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677167 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="extract" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677279 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="389ed0b8-f8b8-4949-baf8-83c696910edf" containerName="extract" Dec 
03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677295 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ff3812c-cb2a-4b07-b140-0f0b97b35e13" containerName="console" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.677713 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.680200 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.681031 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.681057 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.681031 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.682384 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-7fm69" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.717124 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc"] Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.804235 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d700b14d-9221-4fea-b580-6bab5def0a78-webhook-cert\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.804725 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq6f5\" (UniqueName: \"kubernetes.io/projected/d700b14d-9221-4fea-b580-6bab5def0a78-kube-api-access-xq6f5\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.804903 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d700b14d-9221-4fea-b580-6bab5def0a78-apiservice-cert\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.906235 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq6f5\" (UniqueName: \"kubernetes.io/projected/d700b14d-9221-4fea-b580-6bab5def0a78-kube-api-access-xq6f5\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.906651 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/d700b14d-9221-4fea-b580-6bab5def0a78-apiservice-cert\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.906803 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d700b14d-9221-4fea-b580-6bab5def0a78-webhook-cert\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.915983 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d700b14d-9221-4fea-b580-6bab5def0a78-apiservice-cert\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.921591 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d700b14d-9221-4fea-b580-6bab5def0a78-webhook-cert\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.929423 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq6f5\" (UniqueName: \"kubernetes.io/projected/d700b14d-9221-4fea-b580-6bab5def0a78-kube-api-access-xq6f5\") pod \"metallb-operator-controller-manager-5c867b68cc-728xc\" (UID: \"d700b14d-9221-4fea-b580-6bab5def0a78\") " pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.943346 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5"] Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.944417 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.946584 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.946739 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-2km9k" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.946968 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 16:46:17 crc kubenswrapper[5002]: I1203 16:46:17.991356 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.002086 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5"] Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.109857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3150e99c-fa83-4bd5-9c80-45e124725b16-webhook-cert\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.109924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb8lh\" (UniqueName: \"kubernetes.io/projected/3150e99c-fa83-4bd5-9c80-45e124725b16-kube-api-access-pb8lh\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.110073 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3150e99c-fa83-4bd5-9c80-45e124725b16-apiservice-cert\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.211725 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3150e99c-fa83-4bd5-9c80-45e124725b16-webhook-cert\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.211841 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb8lh\" (UniqueName: \"kubernetes.io/projected/3150e99c-fa83-4bd5-9c80-45e124725b16-kube-api-access-pb8lh\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.211879 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3150e99c-fa83-4bd5-9c80-45e124725b16-apiservice-cert\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.218818 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3150e99c-fa83-4bd5-9c80-45e124725b16-apiservice-cert\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.220459 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/3150e99c-fa83-4bd5-9c80-45e124725b16-webhook-cert\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.228040 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb8lh\" (UniqueName: \"kubernetes.io/projected/3150e99c-fa83-4bd5-9c80-45e124725b16-kube-api-access-pb8lh\") pod \"metallb-operator-webhook-server-8667694fb-cwcq5\" (UID: \"3150e99c-fa83-4bd5-9c80-45e124725b16\") " pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.269471 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.277226 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc"] Dec 03 16:46:18 crc kubenswrapper[5002]: W1203 16:46:18.288685 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd700b14d_9221_4fea_b580_6bab5def0a78.slice/crio-99839b5eb6738b73aa92583e5ad41d91c8633c44d2a2e6a147594a063ca9d7f6 WatchSource:0}: Error finding container 99839b5eb6738b73aa92583e5ad41d91c8633c44d2a2e6a147594a063ca9d7f6: Status 404 returned error can't find the container with id 99839b5eb6738b73aa92583e5ad41d91c8633c44d2a2e6a147594a063ca9d7f6 Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.494319 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5"] Dec 03 16:46:18 crc kubenswrapper[5002]: W1203 16:46:18.499777 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3150e99c_fa83_4bd5_9c80_45e124725b16.slice/crio-d24f660997ee534e0ba3d5e7e394744f237cf163238855082c9560abe727be07 WatchSource:0}: Error finding container d24f660997ee534e0ba3d5e7e394744f237cf163238855082c9560abe727be07: Status 404 returned error can't find the container with id d24f660997ee534e0ba3d5e7e394744f237cf163238855082c9560abe727be07 Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.788389 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" event={"ID":"3150e99c-fa83-4bd5-9c80-45e124725b16","Type":"ContainerStarted","Data":"d24f660997ee534e0ba3d5e7e394744f237cf163238855082c9560abe727be07"} Dec 03 16:46:18 crc kubenswrapper[5002]: I1203 16:46:18.789586 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" event={"ID":"d700b14d-9221-4fea-b580-6bab5def0a78","Type":"ContainerStarted","Data":"99839b5eb6738b73aa92583e5ad41d91c8633c44d2a2e6a147594a063ca9d7f6"} Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.593737 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-llq2r"] Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.595440 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.611818 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-llq2r"] Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.763459 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-utilities\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.763559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdn9d\" (UniqueName: \"kubernetes.io/projected/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-kube-api-access-mdn9d\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.763609 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-catalog-content\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.865443 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-catalog-content\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.865511 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-utilities\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.865550 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdn9d\" (UniqueName: \"kubernetes.io/projected/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-kube-api-access-mdn9d\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.866059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-catalog-content\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.866059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-utilities\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.889324 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mdn9d\" (UniqueName: \"kubernetes.io/projected/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-kube-api-access-mdn9d\") pod \"community-operators-llq2r\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:21 crc kubenswrapper[5002]: I1203 16:46:21.916866 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:22 crc kubenswrapper[5002]: I1203 16:46:22.321258 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-llq2r"] Dec 03 16:46:22 crc kubenswrapper[5002]: I1203 16:46:22.813503 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" event={"ID":"d700b14d-9221-4fea-b580-6bab5def0a78","Type":"ContainerStarted","Data":"6152f3d43994d795a162e2bc12be8723b7566c71e41bae639162f6555f171b52"} Dec 03 16:46:22 crc kubenswrapper[5002]: I1203 16:46:22.813929 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:23 crc kubenswrapper[5002]: I1203 16:46:23.830775 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llq2r" event={"ID":"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99","Type":"ContainerStarted","Data":"8325cf81ebe225611fcbd9c7f6fe24ecb513a28c0088f3a0bc5fe412df73ebf6"} Dec 03 16:46:24 crc kubenswrapper[5002]: I1203 16:46:24.838391 5002 generic.go:334] "Generic (PLEG): container finished" podID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerID="483db872b7ae10917ae529fe7be37277b7fcfa40e10057e296c9c8fef3a45a18" exitCode=0 Dec 03 16:46:24 crc kubenswrapper[5002]: I1203 16:46:24.838457 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llq2r" event={"ID":"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99","Type":"ContainerDied","Data":"483db872b7ae10917ae529fe7be37277b7fcfa40e10057e296c9c8fef3a45a18"} Dec 03 16:46:24 crc kubenswrapper[5002]: I1203 16:46:24.848100 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:24 crc kubenswrapper[5002]: I1203 16:46:24.848145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" event={"ID":"3150e99c-fa83-4bd5-9c80-45e124725b16","Type":"ContainerStarted","Data":"15f858349e0e59dabc7dda6e1ed94c3cc938e3bd71d769f9cb9c29215444885e"} Dec 03 16:46:24 crc kubenswrapper[5002]: I1203 16:46:24.867837 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" podStartSLOduration=4.028224831 podStartE2EDuration="7.867812212s" podCreationTimestamp="2025-12-03 16:46:17 +0000 UTC" firstStartedPulling="2025-12-03 16:46:18.292529202 +0000 UTC m=+901.706351090" lastFinishedPulling="2025-12-03 16:46:22.132116583 +0000 UTC m=+905.545938471" observedRunningTime="2025-12-03 16:46:22.836206055 +0000 UTC m=+906.250027943" watchObservedRunningTime="2025-12-03 16:46:24.867812212 +0000 UTC m=+908.281634100" Dec 03 16:46:24 crc kubenswrapper[5002]: I1203 16:46:24.888827 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" podStartSLOduration=2.464275286 
podStartE2EDuration="7.888736013s" podCreationTimestamp="2025-12-03 16:46:17 +0000 UTC" firstStartedPulling="2025-12-03 16:46:18.502552623 +0000 UTC m=+901.916374511" lastFinishedPulling="2025-12-03 16:46:23.92701335 +0000 UTC m=+907.340835238" observedRunningTime="2025-12-03 16:46:24.883851269 +0000 UTC m=+908.297673167" watchObservedRunningTime="2025-12-03 16:46:24.888736013 +0000 UTC m=+908.302557901" Dec 03 16:46:26 crc kubenswrapper[5002]: I1203 16:46:26.853850 5002 generic.go:334] "Generic (PLEG): container finished" podID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerID="5c3fa4712babd7041c7f139fd23c8b7f30494f9b80481b2c61cf248eae7402ab" exitCode=0 Dec 03 16:46:26 crc kubenswrapper[5002]: I1203 16:46:26.854171 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llq2r" event={"ID":"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99","Type":"ContainerDied","Data":"5c3fa4712babd7041c7f139fd23c8b7f30494f9b80481b2c61cf248eae7402ab"} Dec 03 16:46:27 crc kubenswrapper[5002]: I1203 16:46:27.865277 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llq2r" event={"ID":"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99","Type":"ContainerStarted","Data":"6866b0feddbc2f44f2000b01f24b2bcd933b74903543419b775cbf6a037e9064"} Dec 03 16:46:27 crc kubenswrapper[5002]: I1203 16:46:27.888679 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-llq2r" podStartSLOduration=4.483591435 podStartE2EDuration="6.888651192s" podCreationTimestamp="2025-12-03 16:46:21 +0000 UTC" firstStartedPulling="2025-12-03 16:46:24.841605827 +0000 UTC m=+908.255427715" lastFinishedPulling="2025-12-03 16:46:27.246665584 +0000 UTC m=+910.660487472" observedRunningTime="2025-12-03 16:46:27.886168424 +0000 UTC m=+911.299990312" watchObservedRunningTime="2025-12-03 16:46:27.888651192 +0000 UTC m=+911.302473080" Dec 03 16:46:31 crc kubenswrapper[5002]: I1203 16:46:31.917315 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:31 crc kubenswrapper[5002]: I1203 16:46:31.918732 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:32 crc kubenswrapper[5002]: I1203 16:46:32.047012 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:32 crc kubenswrapper[5002]: I1203 16:46:32.950204 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:34 crc kubenswrapper[5002]: I1203 16:46:34.380369 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-llq2r"] Dec 03 16:46:35 crc kubenswrapper[5002]: I1203 16:46:35.915362 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-llq2r" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="registry-server" containerID="cri-o://6866b0feddbc2f44f2000b01f24b2bcd933b74903543419b775cbf6a037e9064" gracePeriod=2 Dec 03 16:46:36 crc kubenswrapper[5002]: I1203 16:46:36.924933 5002 generic.go:334] "Generic (PLEG): container finished" podID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerID="6866b0feddbc2f44f2000b01f24b2bcd933b74903543419b775cbf6a037e9064" exitCode=0 Dec 03 16:46:36 crc kubenswrapper[5002]: I1203 
16:46:36.925090 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llq2r" event={"ID":"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99","Type":"ContainerDied","Data":"6866b0feddbc2f44f2000b01f24b2bcd933b74903543419b775cbf6a037e9064"} Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.712380 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.906101 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-utilities\") pod \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.906197 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdn9d\" (UniqueName: \"kubernetes.io/projected/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-kube-api-access-mdn9d\") pod \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.906723 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-catalog-content\") pod \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\" (UID: \"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99\") " Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.907611 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-utilities" (OuterVolumeSpecName: "utilities") pod "c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" (UID: "c13db3f3-8600-4b69-9b0a-6bf6ab3fde99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.915193 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-kube-api-access-mdn9d" (OuterVolumeSpecName: "kube-api-access-mdn9d") pod "c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" (UID: "c13db3f3-8600-4b69-9b0a-6bf6ab3fde99"). InnerVolumeSpecName "kube-api-access-mdn9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.935675 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-llq2r" event={"ID":"c13db3f3-8600-4b69-9b0a-6bf6ab3fde99","Type":"ContainerDied","Data":"8325cf81ebe225611fcbd9c7f6fe24ecb513a28c0088f3a0bc5fe412df73ebf6"} Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.935771 5002 scope.go:117] "RemoveContainer" containerID="6866b0feddbc2f44f2000b01f24b2bcd933b74903543419b775cbf6a037e9064" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.935843 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-llq2r" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.956794 5002 scope.go:117] "RemoveContainer" containerID="5c3fa4712babd7041c7f139fd23c8b7f30494f9b80481b2c61cf248eae7402ab" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.965055 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" (UID: "c13db3f3-8600-4b69-9b0a-6bf6ab3fde99"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:37 crc kubenswrapper[5002]: I1203 16:46:37.983167 5002 scope.go:117] "RemoveContainer" containerID="483db872b7ae10917ae529fe7be37277b7fcfa40e10057e296c9c8fef3a45a18" Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.008335 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.008386 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.008401 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdn9d\" (UniqueName: \"kubernetes.io/projected/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99-kube-api-access-mdn9d\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.269247 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-llq2r"] Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.278316 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-llq2r"] Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.280969 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-8667694fb-cwcq5" Dec 03 16:46:38 crc kubenswrapper[5002]: I1203 16:46:38.849475 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" path="/var/lib/kubelet/pods/c13db3f3-8600-4b69-9b0a-6bf6ab3fde99/volumes" Dec 03 16:46:57 crc kubenswrapper[5002]: I1203 16:46:57.995553 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5c867b68cc-728xc" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.798934 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-wxdxf"] Dec 03 16:46:58 crc kubenswrapper[5002]: E1203 16:46:58.799559 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="extract-utilities" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.799636 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="extract-utilities" Dec 03 16:46:58 crc kubenswrapper[5002]: E1203 16:46:58.799698 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="extract-content" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.799768 5002 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="extract-content" Dec 03 16:46:58 crc kubenswrapper[5002]: E1203 16:46:58.799847 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="registry-server" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.799900 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="registry-server" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.800059 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c13db3f3-8600-4b69-9b0a-6bf6ab3fde99" containerName="registry-server" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.806215 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.810276 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.810428 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.810980 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-wdmj9" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.825357 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf"] Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.826303 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.831334 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.847815 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf"] Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912255 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-startup\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912304 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-reloader\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912330 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqj4d\" (UniqueName: \"kubernetes.io/projected/6c695792-74ee-418f-92fd-4bcb18beeb5d-kube-api-access-wqj4d\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-metrics\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912385 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c695792-74ee-418f-92fd-4bcb18beeb5d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912402 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-conf\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912420 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-sockets\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912483 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c74eee1-29ec-4886-ada2-083436d4dc82-metrics-certs\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.912537 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-289b7\" (UniqueName: \"kubernetes.io/projected/6c74eee1-29ec-4886-ada2-083436d4dc82-kube-api-access-289b7\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.917354 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-j7qgc"] Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.919081 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-j7qgc" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.922284 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.922479 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-vvr2n" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.923429 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.923587 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-wqxqr"] Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.924469 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.924897 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.928124 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-wqxqr"] Dec 03 16:46:58 crc kubenswrapper[5002]: I1203 16:46:58.928937 5002 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014084 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-289b7\" (UniqueName: \"kubernetes.io/projected/6c74eee1-29ec-4886-ada2-083436d4dc82-kube-api-access-289b7\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014161 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr8ff\" (UniqueName: \"kubernetes.io/projected/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-kube-api-access-pr8ff\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014199 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-metrics-certs\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014245 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-startup\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014268 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-reloader\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014289 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqj4d\" (UniqueName: \"kubernetes.io/projected/6c695792-74ee-418f-92fd-4bcb18beeb5d-kube-api-access-wqj4d\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014316 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-metrics-certs\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014348 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-metrics\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014371 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl8s2\" (UniqueName: \"kubernetes.io/projected/6f46647a-b329-42a7-9372-12ffde9fbb5f-kube-api-access-rl8s2\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014388 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c695792-74ee-418f-92fd-4bcb18beeb5d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014406 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-conf\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014427 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-sockets\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014442 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-metallb-excludel2\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014468 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-cert\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.014506 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c74eee1-29ec-4886-ada2-083436d4dc82-metrics-certs\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.016220 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-metrics\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.016494 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-reloader\") pod \"frr-k8s-wxdxf\" (UID: 
\"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.016789 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-sockets\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.016887 5002 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.016940 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c695792-74ee-418f-92fd-4bcb18beeb5d-cert podName:6c695792-74ee-418f-92fd-4bcb18beeb5d nodeName:}" failed. No retries permitted until 2025-12-03 16:46:59.516923974 +0000 UTC m=+942.930745862 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c695792-74ee-418f-92fd-4bcb18beeb5d-cert") pod "frr-k8s-webhook-server-7fcb986d4-wvmbf" (UID: "6c695792-74ee-418f-92fd-4bcb18beeb5d") : secret "frr-k8s-webhook-server-cert" not found Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.017179 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-startup\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.018054 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6c74eee1-29ec-4886-ada2-083436d4dc82-frr-conf\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.024678 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c74eee1-29ec-4886-ada2-083436d4dc82-metrics-certs\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.049732 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqj4d\" (UniqueName: \"kubernetes.io/projected/6c695792-74ee-418f-92fd-4bcb18beeb5d-kube-api-access-wqj4d\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.052823 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-289b7\" (UniqueName: \"kubernetes.io/projected/6c74eee1-29ec-4886-ada2-083436d4dc82-kube-api-access-289b7\") pod \"frr-k8s-wxdxf\" (UID: \"6c74eee1-29ec-4886-ada2-083436d4dc82\") " pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116443 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr8ff\" (UniqueName: \"kubernetes.io/projected/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-kube-api-access-pr8ff\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116530 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-metrics-certs\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116589 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-metrics-certs\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116633 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl8s2\" (UniqueName: \"kubernetes.io/projected/6f46647a-b329-42a7-9372-12ffde9fbb5f-kube-api-access-rl8s2\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116680 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-metallb-excludel2\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.116687 5002 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116705 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-cert\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.116727 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.116785 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-metrics-certs podName:6f46647a-b329-42a7-9372-12ffde9fbb5f nodeName:}" failed. No retries permitted until 2025-12-03 16:46:59.616761238 +0000 UTC m=+943.030583126 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-metrics-certs") pod "controller-f8648f98b-wqxqr" (UID: "6f46647a-b329-42a7-9372-12ffde9fbb5f") : secret "controller-certs-secret" not found Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.116879 5002 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.116915 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist podName:1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4 nodeName:}" failed. No retries permitted until 2025-12-03 16:46:59.616904672 +0000 UTC m=+943.030726560 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist") pod "speaker-j7qgc" (UID: "1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4") : secret "metallb-memberlist" not found Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.117508 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-metallb-excludel2\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.120144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-cert\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.122276 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-metrics-certs\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.125175 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.136285 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr8ff\" (UniqueName: \"kubernetes.io/projected/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-kube-api-access-pr8ff\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.142149 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl8s2\" (UniqueName: \"kubernetes.io/projected/6f46647a-b329-42a7-9372-12ffde9fbb5f-kube-api-access-rl8s2\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.206104 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hlkqw"] Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.207637 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.227651 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hlkqw"] Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.320698 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-utilities\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.320813 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-catalog-content\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.320964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwln9\" (UniqueName: \"kubernetes.io/projected/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-kube-api-access-bwln9\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.422565 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-utilities\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.422697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-catalog-content\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.422829 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwln9\" (UniqueName: \"kubernetes.io/projected/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-kube-api-access-bwln9\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.424687 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-utilities\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.424986 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-catalog-content\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.443769 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bwln9\" (UniqueName: \"kubernetes.io/projected/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-kube-api-access-bwln9\") pod \"certified-operators-hlkqw\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.524533 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c695792-74ee-418f-92fd-4bcb18beeb5d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.528302 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c695792-74ee-418f-92fd-4bcb18beeb5d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wvmbf\" (UID: \"6c695792-74ee-418f-92fd-4bcb18beeb5d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.542100 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.625807 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-metrics-certs\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.625914 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.626070 5002 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 16:46:59 crc kubenswrapper[5002]: E1203 16:46:59.626122 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist podName:1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4 nodeName:}" failed. No retries permitted until 2025-12-03 16:47:00.626109156 +0000 UTC m=+944.039931044 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist") pod "speaker-j7qgc" (UID: "1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4") : secret "metallb-memberlist" not found Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.632966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f46647a-b329-42a7-9372-12ffde9fbb5f-metrics-certs\") pod \"controller-f8648f98b-wqxqr\" (UID: \"6f46647a-b329-42a7-9372-12ffde9fbb5f\") " pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.748142 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.884776 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:46:59 crc kubenswrapper[5002]: I1203 16:46:59.996069 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hlkqw"] Dec 03 16:47:00 crc kubenswrapper[5002]: I1203 16:47:00.086559 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf"] Dec 03 16:47:00 crc kubenswrapper[5002]: I1203 16:47:00.095294 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerStarted","Data":"a4cb11c9c2bf89d2396f135df74ca4f4a2d8e49f561095ed95ef7b00786d966f"} Dec 03 16:47:00 crc kubenswrapper[5002]: I1203 16:47:00.101702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"6d63942dd7683bf64f0be3d33adad72a1458b5178dc042a0f98d90f899a30cac"} Dec 03 16:47:00 crc kubenswrapper[5002]: E1203 16:47:00.452488 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf888e6fe_2c5e_492f_b368_3c05ffd20b8c.slice/crio-conmon-9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf888e6fe_2c5e_492f_b368_3c05ffd20b8c.slice/crio-9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:47:00 crc kubenswrapper[5002]: I1203 16:47:00.452587 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-wqxqr"] Dec 03 16:47:00 crc kubenswrapper[5002]: I1203 16:47:00.644535 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc" Dec 03 16:47:00 crc kubenswrapper[5002]: E1203 16:47:00.644772 5002 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 16:47:00 crc kubenswrapper[5002]: E1203 16:47:00.644870 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist podName:1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4 nodeName:}" failed. No retries permitted until 2025-12-03 16:47:02.644851055 +0000 UTC m=+946.058672943 (durationBeforeRetry 2s). 
Dec 03 16:47:01 crc kubenswrapper[5002]: I1203 16:47:01.116728 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-wqxqr" event={"ID":"6f46647a-b329-42a7-9372-12ffde9fbb5f","Type":"ContainerStarted","Data":"e9e8c890a9ea3e6a4df0150ecd88b169ca6439e9b11ad82ce013cbebfd5e03ea"}
Dec 03 16:47:01 crc kubenswrapper[5002]: I1203 16:47:01.117087 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-wqxqr" event={"ID":"6f46647a-b329-42a7-9372-12ffde9fbb5f","Type":"ContainerStarted","Data":"2321fb5d384155734dad04a7ff6cc2be1341541bb161df0e3a8ab50267e8aa5e"}
Dec 03 16:47:01 crc kubenswrapper[5002]: I1203 16:47:01.118991 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" event={"ID":"6c695792-74ee-418f-92fd-4bcb18beeb5d","Type":"ContainerStarted","Data":"06052c045e4feb6b73a21d7cd7be5cf9b13e123e549352464b538aea34663ece"}
Dec 03 16:47:01 crc kubenswrapper[5002]: I1203 16:47:01.120989 5002 generic.go:334] "Generic (PLEG): container finished" podID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerID="9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e" exitCode=0
Dec 03 16:47:01 crc kubenswrapper[5002]: I1203 16:47:01.121018 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerDied","Data":"9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e"}
Dec 03 16:47:02 crc kubenswrapper[5002]: I1203 16:47:02.128935 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-wqxqr" event={"ID":"6f46647a-b329-42a7-9372-12ffde9fbb5f","Type":"ContainerStarted","Data":"e36635b32ac6b0de42f18993b3493a3ccd2d5483820da53c16262c6f1f851a8f"}
Dec 03 16:47:02 crc kubenswrapper[5002]: I1203 16:47:02.130151 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-wqxqr"
Dec 03 16:47:02 crc kubenswrapper[5002]: I1203 16:47:02.154189 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-wqxqr" podStartSLOduration=4.154168621 podStartE2EDuration="4.154168621s" podCreationTimestamp="2025-12-03 16:46:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:47:02.149192015 +0000 UTC m=+945.563013903" watchObservedRunningTime="2025-12-03 16:47:02.154168621 +0000 UTC m=+945.567990529"
Dec 03 16:47:02 crc kubenswrapper[5002]: I1203 16:47:02.673495 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc"
Dec 03 16:47:02 crc kubenswrapper[5002]: I1203 16:47:02.697496 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4-memberlist\") pod \"speaker-j7qgc\" (UID: \"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4\") " pod="metallb-system/speaker-j7qgc"
pod="metallb-system/speaker-j7qgc" Dec 03 16:47:02 crc kubenswrapper[5002]: I1203 16:47:02.875393 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-j7qgc" Dec 03 16:47:03 crc kubenswrapper[5002]: I1203 16:47:03.134844 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j7qgc" event={"ID":"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4","Type":"ContainerStarted","Data":"5737af4f582757ec9134943a7b299437d09c311ea35228b357cd2e882a835eb5"} Dec 03 16:47:03 crc kubenswrapper[5002]: I1203 16:47:03.136866 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerStarted","Data":"13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9"} Dec 03 16:47:04 crc kubenswrapper[5002]: I1203 16:47:04.152679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j7qgc" event={"ID":"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4","Type":"ContainerStarted","Data":"c7f8825d9f1cad6b2fe00deb4ad13b4163878a5ebe0f9754ca3916358f018f8e"} Dec 03 16:47:04 crc kubenswrapper[5002]: I1203 16:47:04.153049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j7qgc" event={"ID":"1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4","Type":"ContainerStarted","Data":"0d538871f09270f0b2a8efe9cc3651a386bbf4c4bf8af0a00222d0d04af1c472"} Dec 03 16:47:04 crc kubenswrapper[5002]: I1203 16:47:04.153069 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-j7qgc" Dec 03 16:47:04 crc kubenswrapper[5002]: I1203 16:47:04.156141 5002 generic.go:334] "Generic (PLEG): container finished" podID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerID="13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9" exitCode=0 Dec 03 16:47:04 crc kubenswrapper[5002]: I1203 16:47:04.156298 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerDied","Data":"13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9"} Dec 03 16:47:04 crc kubenswrapper[5002]: I1203 16:47:04.191130 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-j7qgc" podStartSLOduration=6.191105593 podStartE2EDuration="6.191105593s" podCreationTimestamp="2025-12-03 16:46:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:47:04.188968394 +0000 UTC m=+947.602790292" watchObservedRunningTime="2025-12-03 16:47:04.191105593 +0000 UTC m=+947.604927491" Dec 03 16:47:05 crc kubenswrapper[5002]: I1203 16:47:05.182587 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerStarted","Data":"7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f"} Dec 03 16:47:06 crc kubenswrapper[5002]: I1203 16:47:06.859623 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hlkqw" podStartSLOduration=4.326724136 podStartE2EDuration="7.859602858s" podCreationTimestamp="2025-12-03 16:46:59 +0000 UTC" firstStartedPulling="2025-12-03 16:47:01.123153527 +0000 UTC m=+944.536975415" lastFinishedPulling="2025-12-03 16:47:04.656032249 +0000 UTC m=+948.069854137" 
observedRunningTime="2025-12-03 16:47:05.212948886 +0000 UTC m=+948.626770774" watchObservedRunningTime="2025-12-03 16:47:06.859602858 +0000 UTC m=+950.273424746" Dec 03 16:47:09 crc kubenswrapper[5002]: I1203 16:47:09.542296 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:47:09 crc kubenswrapper[5002]: I1203 16:47:09.542699 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:47:09 crc kubenswrapper[5002]: I1203 16:47:09.589412 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:47:10 crc kubenswrapper[5002]: I1203 16:47:10.272979 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:47:11 crc kubenswrapper[5002]: I1203 16:47:11.981635 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hlkqw"] Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.244201 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hlkqw" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="registry-server" containerID="cri-o://7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f" gracePeriod=2 Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.732154 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.858195 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-utilities\") pod \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.858775 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwln9\" (UniqueName: \"kubernetes.io/projected/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-kube-api-access-bwln9\") pod \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.858858 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-catalog-content\") pod \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\" (UID: \"f888e6fe-2c5e-492f-b368-3c05ffd20b8c\") " Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.861587 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-utilities" (OuterVolumeSpecName: "utilities") pod "f888e6fe-2c5e-492f-b368-3c05ffd20b8c" (UID: "f888e6fe-2c5e-492f-b368-3c05ffd20b8c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.868631 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-kube-api-access-bwln9" (OuterVolumeSpecName: "kube-api-access-bwln9") pod "f888e6fe-2c5e-492f-b368-3c05ffd20b8c" (UID: "f888e6fe-2c5e-492f-b368-3c05ffd20b8c"). 
InnerVolumeSpecName "kube-api-access-bwln9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.910608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f888e6fe-2c5e-492f-b368-3c05ffd20b8c" (UID: "f888e6fe-2c5e-492f-b368-3c05ffd20b8c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.961155 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwln9\" (UniqueName: \"kubernetes.io/projected/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-kube-api-access-bwln9\") on node \"crc\" DevicePath \"\"" Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.961202 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:47:12 crc kubenswrapper[5002]: I1203 16:47:12.961217 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f888e6fe-2c5e-492f-b368-3c05ffd20b8c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.252854 5002 generic.go:334] "Generic (PLEG): container finished" podID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerID="7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f" exitCode=0 Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.252934 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerDied","Data":"7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f"} Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.253008 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hlkqw" event={"ID":"f888e6fe-2c5e-492f-b368-3c05ffd20b8c","Type":"ContainerDied","Data":"a4cb11c9c2bf89d2396f135df74ca4f4a2d8e49f561095ed95ef7b00786d966f"} Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.253002 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hlkqw" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.253085 5002 scope.go:117] "RemoveContainer" containerID="7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.255720 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"e327a3fab98c1a726825c9424fe2eccca3833aec9663a455af551b22464b6064"} Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.302973 5002 scope.go:117] "RemoveContainer" containerID="13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.319492 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hlkqw"] Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.326581 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hlkqw"] Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.338476 5002 scope.go:117] "RemoveContainer" containerID="9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.355368 5002 scope.go:117] "RemoveContainer" containerID="7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f" Dec 03 16:47:13 crc kubenswrapper[5002]: E1203 16:47:13.355972 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f\": container with ID starting with 7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f not found: ID does not exist" containerID="7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.356011 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f"} err="failed to get container status \"7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f\": rpc error: code = NotFound desc = could not find container \"7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f\": container with ID starting with 7071fc2172346099cb421fcb8f7c486a3f1b0027603c3755d95370958a57819f not found: ID does not exist" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.356045 5002 scope.go:117] "RemoveContainer" containerID="13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9" Dec 03 16:47:13 crc kubenswrapper[5002]: E1203 16:47:13.356356 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9\": container with ID starting with 13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9 not found: ID does not exist" containerID="13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.356377 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9"} err="failed to get container status \"13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9\": rpc error: code = NotFound desc = could not find container 
\"13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9\": container with ID starting with 13c7dd77975e267444aeec8d399dbfeaac5e8112b7461b3d3a43ce091a9cc2c9 not found: ID does not exist" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.356391 5002 scope.go:117] "RemoveContainer" containerID="9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e" Dec 03 16:47:13 crc kubenswrapper[5002]: E1203 16:47:13.356989 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e\": container with ID starting with 9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e not found: ID does not exist" containerID="9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e" Dec 03 16:47:13 crc kubenswrapper[5002]: I1203 16:47:13.357013 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e"} err="failed to get container status \"9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e\": rpc error: code = NotFound desc = could not find container \"9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e\": container with ID starting with 9fba82c080b5f5a2ce558f72bb5605e6603f18afac1f907d3ff0fa7901fde46e not found: ID does not exist" Dec 03 16:47:14 crc kubenswrapper[5002]: I1203 16:47:14.268764 5002 generic.go:334] "Generic (PLEG): container finished" podID="6c74eee1-29ec-4886-ada2-083436d4dc82" containerID="e327a3fab98c1a726825c9424fe2eccca3833aec9663a455af551b22464b6064" exitCode=0 Dec 03 16:47:14 crc kubenswrapper[5002]: I1203 16:47:14.268884 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerDied","Data":"e327a3fab98c1a726825c9424fe2eccca3833aec9663a455af551b22464b6064"} Dec 03 16:47:14 crc kubenswrapper[5002]: I1203 16:47:14.273637 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" event={"ID":"6c695792-74ee-418f-92fd-4bcb18beeb5d","Type":"ContainerStarted","Data":"ecf84cd7d4edc47195ec2c61816f71452ff6ae180ee6963ff73fc5dd74cf2f35"} Dec 03 16:47:14 crc kubenswrapper[5002]: I1203 16:47:14.847702 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" path="/var/lib/kubelet/pods/f888e6fe-2c5e-492f-b368-3c05ffd20b8c/volumes" Dec 03 16:47:15 crc kubenswrapper[5002]: I1203 16:47:15.286723 5002 generic.go:334] "Generic (PLEG): container finished" podID="6c74eee1-29ec-4886-ada2-083436d4dc82" containerID="991065aa0635df43e35800e90ebe24729bdb24ef5b4cf9294b5a4029b86acc24" exitCode=0 Dec 03 16:47:15 crc kubenswrapper[5002]: I1203 16:47:15.286809 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerDied","Data":"991065aa0635df43e35800e90ebe24729bdb24ef5b4cf9294b5a4029b86acc24"} Dec 03 16:47:15 crc kubenswrapper[5002]: I1203 16:47:15.307167 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" podStartSLOduration=5.06344104 podStartE2EDuration="17.307142887s" podCreationTimestamp="2025-12-03 16:46:58 +0000 UTC" firstStartedPulling="2025-12-03 16:47:00.105812126 +0000 UTC m=+943.519634014" 
lastFinishedPulling="2025-12-03 16:47:12.349513963 +0000 UTC m=+955.763335861" observedRunningTime="2025-12-03 16:47:15.301633027 +0000 UTC m=+958.715454945" watchObservedRunningTime="2025-12-03 16:47:15.307142887 +0000 UTC m=+958.720964785" Dec 03 16:47:17 crc kubenswrapper[5002]: I1203 16:47:17.299041 5002 generic.go:334] "Generic (PLEG): container finished" podID="6c74eee1-29ec-4886-ada2-083436d4dc82" containerID="43d734178666c767ad7dc313e40c5af9cb5b81638a73e8d433e1c369282e197e" exitCode=0 Dec 03 16:47:17 crc kubenswrapper[5002]: I1203 16:47:17.299104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerDied","Data":"43d734178666c767ad7dc313e40c5af9cb5b81638a73e8d433e1c369282e197e"} Dec 03 16:47:18 crc kubenswrapper[5002]: I1203 16:47:18.309216 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"2a4629ebf35198048d70961fe89a2823b975b30f883059b2dcaf0378c43e57b1"} Dec 03 16:47:18 crc kubenswrapper[5002]: I1203 16:47:18.309797 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"bbca8877fef3de017a686cfb9edb5ac6d52b30d450b262bf54dbd9e083a4af52"} Dec 03 16:47:18 crc kubenswrapper[5002]: I1203 16:47:18.309830 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"1d74fb96cc53d06d1ddeeb5ff068ba78d8ee7a154778d7087490276b4da9e9da"} Dec 03 16:47:18 crc kubenswrapper[5002]: I1203 16:47:18.309844 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"fbdc2e9e6ea54495ebe60c4b286eeb0b74e274d36edd46ebedb5fcedd45d41b6"} Dec 03 16:47:18 crc kubenswrapper[5002]: I1203 16:47:18.309857 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"e11c3c912f3223e4774788a40e19d92dc9ed36187dd38864a483226fd955421d"} Dec 03 16:47:19 crc kubenswrapper[5002]: I1203 16:47:19.319186 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wxdxf" event={"ID":"6c74eee1-29ec-4886-ada2-083436d4dc82","Type":"ContainerStarted","Data":"aec4763f5ec8d16900670781b9181afc248528b89d494a7bac51c5bf4965c952"} Dec 03 16:47:19 crc kubenswrapper[5002]: I1203 16:47:19.319670 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:47:19 crc kubenswrapper[5002]: I1203 16:47:19.341077 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-wxdxf" podStartSLOduration=8.319259924 podStartE2EDuration="21.341059301s" podCreationTimestamp="2025-12-03 16:46:58 +0000 UTC" firstStartedPulling="2025-12-03 16:46:59.289083431 +0000 UTC m=+942.702905309" lastFinishedPulling="2025-12-03 16:47:12.310882798 +0000 UTC m=+955.724704686" observedRunningTime="2025-12-03 16:47:19.340088294 +0000 UTC m=+962.753910212" watchObservedRunningTime="2025-12-03 16:47:19.341059301 +0000 UTC m=+962.754881179" Dec 03 16:47:19 crc kubenswrapper[5002]: I1203 16:47:19.749361 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:47:19 crc kubenswrapper[5002]: I1203 16:47:19.890192 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-wqxqr" Dec 03 16:47:22 crc kubenswrapper[5002]: I1203 16:47:22.880855 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-j7qgc" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.126098 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.164676 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.634646 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh"] Dec 03 16:47:24 crc kubenswrapper[5002]: E1203 16:47:24.634950 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="registry-server" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.634963 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="registry-server" Dec 03 16:47:24 crc kubenswrapper[5002]: E1203 16:47:24.634978 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="extract-content" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.634985 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="extract-content" Dec 03 16:47:24 crc kubenswrapper[5002]: E1203 16:47:24.635015 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="extract-utilities" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.635025 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="extract-utilities" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.635130 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f888e6fe-2c5e-492f-b368-3c05ffd20b8c" containerName="registry-server" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.636093 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.638383 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.647380 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh"] Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.738227 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r99pk\" (UniqueName: \"kubernetes.io/projected/4dc1e514-8145-48e7-b668-0360bade0043-kube-api-access-r99pk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.738356 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.738381 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.839806 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.839863 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.839896 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r99pk\" (UniqueName: \"kubernetes.io/projected/4dc1e514-8145-48e7-b668-0360bade0043-kube-api-access-r99pk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.840903 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.840909 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.866032 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r99pk\" (UniqueName: \"kubernetes.io/projected/4dc1e514-8145-48e7-b668-0360bade0043-kube-api-access-r99pk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:24 crc kubenswrapper[5002]: I1203 16:47:24.954993 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:25 crc kubenswrapper[5002]: I1203 16:47:25.231900 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh"] Dec 03 16:47:25 crc kubenswrapper[5002]: I1203 16:47:25.358217 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" event={"ID":"4dc1e514-8145-48e7-b668-0360bade0043","Type":"ContainerStarted","Data":"a8b7fb5241bb89298272a38ad746a3e5e00d72a031417613db9a7d8ed128634c"} Dec 03 16:47:26 crc kubenswrapper[5002]: I1203 16:47:26.370555 5002 generic.go:334] "Generic (PLEG): container finished" podID="4dc1e514-8145-48e7-b668-0360bade0043" containerID="fd12831229588763685e195a0ef010b9968274c15b789d6e6cc1db06cee1a57f" exitCode=0 Dec 03 16:47:26 crc kubenswrapper[5002]: I1203 16:47:26.370670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" event={"ID":"4dc1e514-8145-48e7-b668-0360bade0043","Type":"ContainerDied","Data":"fd12831229588763685e195a0ef010b9968274c15b789d6e6cc1db06cee1a57f"} Dec 03 16:47:29 crc kubenswrapper[5002]: I1203 16:47:29.130078 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-wxdxf" Dec 03 16:47:29 crc kubenswrapper[5002]: I1203 16:47:29.760767 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wvmbf" Dec 03 16:47:30 crc kubenswrapper[5002]: I1203 16:47:30.410092 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" event={"ID":"4dc1e514-8145-48e7-b668-0360bade0043","Type":"ContainerStarted","Data":"2f44597ba1c765abd8acfdea5a09bddfb128a6b9b17caa7196977dec01530ed8"} Dec 03 16:47:31 crc kubenswrapper[5002]: I1203 16:47:31.424282 5002 generic.go:334] "Generic (PLEG): container finished" podID="4dc1e514-8145-48e7-b668-0360bade0043" 
containerID="2f44597ba1c765abd8acfdea5a09bddfb128a6b9b17caa7196977dec01530ed8" exitCode=0 Dec 03 16:47:31 crc kubenswrapper[5002]: I1203 16:47:31.424353 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" event={"ID":"4dc1e514-8145-48e7-b668-0360bade0043","Type":"ContainerDied","Data":"2f44597ba1c765abd8acfdea5a09bddfb128a6b9b17caa7196977dec01530ed8"} Dec 03 16:47:32 crc kubenswrapper[5002]: I1203 16:47:32.436142 5002 generic.go:334] "Generic (PLEG): container finished" podID="4dc1e514-8145-48e7-b668-0360bade0043" containerID="5bda97cb6c494712bb42e518bd598dddcec4a15ec7d81d5fbfbd4d816f4c5e0f" exitCode=0 Dec 03 16:47:32 crc kubenswrapper[5002]: I1203 16:47:32.436287 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" event={"ID":"4dc1e514-8145-48e7-b668-0360bade0043","Type":"ContainerDied","Data":"5bda97cb6c494712bb42e518bd598dddcec4a15ec7d81d5fbfbd4d816f4c5e0f"} Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.802912 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.955282 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-bundle\") pod \"4dc1e514-8145-48e7-b668-0360bade0043\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.955484 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-util\") pod \"4dc1e514-8145-48e7-b668-0360bade0043\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.955561 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r99pk\" (UniqueName: \"kubernetes.io/projected/4dc1e514-8145-48e7-b668-0360bade0043-kube-api-access-r99pk\") pod \"4dc1e514-8145-48e7-b668-0360bade0043\" (UID: \"4dc1e514-8145-48e7-b668-0360bade0043\") " Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.957001 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-bundle" (OuterVolumeSpecName: "bundle") pod "4dc1e514-8145-48e7-b668-0360bade0043" (UID: "4dc1e514-8145-48e7-b668-0360bade0043"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.963182 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc1e514-8145-48e7-b668-0360bade0043-kube-api-access-r99pk" (OuterVolumeSpecName: "kube-api-access-r99pk") pod "4dc1e514-8145-48e7-b668-0360bade0043" (UID: "4dc1e514-8145-48e7-b668-0360bade0043"). InnerVolumeSpecName "kube-api-access-r99pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:47:33 crc kubenswrapper[5002]: I1203 16:47:33.968972 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-util" (OuterVolumeSpecName: "util") pod "4dc1e514-8145-48e7-b668-0360bade0043" (UID: "4dc1e514-8145-48e7-b668-0360bade0043"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:47:34 crc kubenswrapper[5002]: I1203 16:47:34.057645 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:47:34 crc kubenswrapper[5002]: I1203 16:47:34.057704 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4dc1e514-8145-48e7-b668-0360bade0043-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:47:34 crc kubenswrapper[5002]: I1203 16:47:34.057718 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r99pk\" (UniqueName: \"kubernetes.io/projected/4dc1e514-8145-48e7-b668-0360bade0043-kube-api-access-r99pk\") on node \"crc\" DevicePath \"\"" Dec 03 16:47:34 crc kubenswrapper[5002]: I1203 16:47:34.458221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" event={"ID":"4dc1e514-8145-48e7-b668-0360bade0043","Type":"ContainerDied","Data":"a8b7fb5241bb89298272a38ad746a3e5e00d72a031417613db9a7d8ed128634c"} Dec 03 16:47:34 crc kubenswrapper[5002]: I1203 16:47:34.458285 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8b7fb5241bb89298272a38ad746a3e5e00d72a031417613db9a7d8ed128634c" Dec 03 16:47:34 crc kubenswrapper[5002]: I1203 16:47:34.458397 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.956626 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk"] Dec 03 16:47:42 crc kubenswrapper[5002]: E1203 16:47:42.957387 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="extract" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.957398 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="extract" Dec 03 16:47:42 crc kubenswrapper[5002]: E1203 16:47:42.957412 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="util" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.957418 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="util" Dec 03 16:47:42 crc kubenswrapper[5002]: E1203 16:47:42.957432 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="pull" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.957439 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="pull" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.957542 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc1e514-8145-48e7-b668-0360bade0043" containerName="extract" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.958012 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.960976 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.962839 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.963984 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-8qwsb" Dec 03 16:47:42 crc kubenswrapper[5002]: I1203 16:47:42.984588 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk"] Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.010687 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmjj4\" (UniqueName: \"kubernetes.io/projected/7f917dd3-1dfc-42f8-986f-631e471849d9-kube-api-access-lmjj4\") pod \"cert-manager-operator-controller-manager-64cf6dff88-b5psk\" (UID: \"7f917dd3-1dfc-42f8-986f-631e471849d9\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.010791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/7f917dd3-1dfc-42f8-986f-631e471849d9-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-b5psk\" (UID: \"7f917dd3-1dfc-42f8-986f-631e471849d9\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.112441 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmjj4\" (UniqueName: \"kubernetes.io/projected/7f917dd3-1dfc-42f8-986f-631e471849d9-kube-api-access-lmjj4\") pod \"cert-manager-operator-controller-manager-64cf6dff88-b5psk\" (UID: \"7f917dd3-1dfc-42f8-986f-631e471849d9\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.112718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/7f917dd3-1dfc-42f8-986f-631e471849d9-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-b5psk\" (UID: \"7f917dd3-1dfc-42f8-986f-631e471849d9\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.113541 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/7f917dd3-1dfc-42f8-986f-631e471849d9-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-b5psk\" (UID: \"7f917dd3-1dfc-42f8-986f-631e471849d9\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.154674 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmjj4\" (UniqueName: \"kubernetes.io/projected/7f917dd3-1dfc-42f8-986f-631e471849d9-kube-api-access-lmjj4\") pod \"cert-manager-operator-controller-manager-64cf6dff88-b5psk\" (UID: \"7f917dd3-1dfc-42f8-986f-631e471849d9\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.274396 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" Dec 03 16:47:43 crc kubenswrapper[5002]: I1203 16:47:43.732189 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk"] Dec 03 16:47:43 crc kubenswrapper[5002]: W1203 16:47:43.748094 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f917dd3_1dfc_42f8_986f_631e471849d9.slice/crio-02f63042f541840cab9dff699f8b5f039a9c52a50a3e4f731ac8cbc0c8bc09d6 WatchSource:0}: Error finding container 02f63042f541840cab9dff699f8b5f039a9c52a50a3e4f731ac8cbc0c8bc09d6: Status 404 returned error can't find the container with id 02f63042f541840cab9dff699f8b5f039a9c52a50a3e4f731ac8cbc0c8bc09d6 Dec 03 16:47:44 crc kubenswrapper[5002]: I1203 16:47:44.521856 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" event={"ID":"7f917dd3-1dfc-42f8-986f-631e471849d9","Type":"ContainerStarted","Data":"02f63042f541840cab9dff699f8b5f039a9c52a50a3e4f731ac8cbc0c8bc09d6"} Dec 03 16:47:50 crc kubenswrapper[5002]: I1203 16:47:50.916467 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:47:50 crc kubenswrapper[5002]: I1203 16:47:50.917130 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:47:52 crc kubenswrapper[5002]: I1203 16:47:52.577376 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" event={"ID":"7f917dd3-1dfc-42f8-986f-631e471849d9","Type":"ContainerStarted","Data":"76d0e542e333879b9518d32e379f6d2b09195f05e1a9de20e03ed7ec4d9ec2fe"} Dec 03 16:47:52 crc kubenswrapper[5002]: I1203 16:47:52.602377 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-b5psk" podStartSLOduration=1.986440933 podStartE2EDuration="10.602352436s" podCreationTimestamp="2025-12-03 16:47:42 +0000 UTC" firstStartedPulling="2025-12-03 16:47:43.751980915 +0000 UTC m=+987.165802823" lastFinishedPulling="2025-12-03 16:47:52.367892408 +0000 UTC m=+995.781714326" observedRunningTime="2025-12-03 16:47:52.595831098 +0000 UTC m=+996.009652986" watchObservedRunningTime="2025-12-03 16:47:52.602352436 +0000 UTC m=+996.016174324" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.092360 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sll6f"] Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.093867 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.095888 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-5r4qb" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.096014 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.096115 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.104001 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sll6f"] Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.186621 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/549ee4dc-b28a-48da-ad9e-4155cdfb67d5-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sll6f\" (UID: \"549ee4dc-b28a-48da-ad9e-4155cdfb67d5\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.186798 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v9bq\" (UniqueName: \"kubernetes.io/projected/549ee4dc-b28a-48da-ad9e-4155cdfb67d5-kube-api-access-7v9bq\") pod \"cert-manager-cainjector-855d9ccff4-sll6f\" (UID: \"549ee4dc-b28a-48da-ad9e-4155cdfb67d5\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.288355 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/549ee4dc-b28a-48da-ad9e-4155cdfb67d5-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sll6f\" (UID: \"549ee4dc-b28a-48da-ad9e-4155cdfb67d5\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.288482 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v9bq\" (UniqueName: \"kubernetes.io/projected/549ee4dc-b28a-48da-ad9e-4155cdfb67d5-kube-api-access-7v9bq\") pod \"cert-manager-cainjector-855d9ccff4-sll6f\" (UID: \"549ee4dc-b28a-48da-ad9e-4155cdfb67d5\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.314396 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v9bq\" (UniqueName: \"kubernetes.io/projected/549ee4dc-b28a-48da-ad9e-4155cdfb67d5-kube-api-access-7v9bq\") pod \"cert-manager-cainjector-855d9ccff4-sll6f\" (UID: \"549ee4dc-b28a-48da-ad9e-4155cdfb67d5\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.317732 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/549ee4dc-b28a-48da-ad9e-4155cdfb67d5-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sll6f\" (UID: \"549ee4dc-b28a-48da-ad9e-4155cdfb67d5\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.433439 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" Dec 03 16:47:58 crc kubenswrapper[5002]: I1203 16:47:58.701715 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sll6f"] Dec 03 16:47:59 crc kubenswrapper[5002]: I1203 16:47:59.629313 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" event={"ID":"549ee4dc-b28a-48da-ad9e-4155cdfb67d5","Type":"ContainerStarted","Data":"0890d95abd828e4daeba70081e7455113623d28108d072a44c782ba068202109"} Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.718043 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vlmzm"] Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.719328 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.723051 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-7xrj8" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.741642 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vlmzm"] Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.843260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30a62dfc-673c-4360-97a3-bb603b87aeab-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vlmzm\" (UID: \"30a62dfc-673c-4360-97a3-bb603b87aeab\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.843326 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28lms\" (UniqueName: \"kubernetes.io/projected/30a62dfc-673c-4360-97a3-bb603b87aeab-kube-api-access-28lms\") pod \"cert-manager-webhook-f4fb5df64-vlmzm\" (UID: \"30a62dfc-673c-4360-97a3-bb603b87aeab\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.945286 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30a62dfc-673c-4360-97a3-bb603b87aeab-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vlmzm\" (UID: \"30a62dfc-673c-4360-97a3-bb603b87aeab\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.945509 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28lms\" (UniqueName: \"kubernetes.io/projected/30a62dfc-673c-4360-97a3-bb603b87aeab-kube-api-access-28lms\") pod \"cert-manager-webhook-f4fb5df64-vlmzm\" (UID: \"30a62dfc-673c-4360-97a3-bb603b87aeab\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.970775 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30a62dfc-673c-4360-97a3-bb603b87aeab-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vlmzm\" (UID: \"30a62dfc-673c-4360-97a3-bb603b87aeab\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:01 crc kubenswrapper[5002]: I1203 16:48:01.971337 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-28lms\" (UniqueName: \"kubernetes.io/projected/30a62dfc-673c-4360-97a3-bb603b87aeab-kube-api-access-28lms\") pod \"cert-manager-webhook-f4fb5df64-vlmzm\" (UID: \"30a62dfc-673c-4360-97a3-bb603b87aeab\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:02 crc kubenswrapper[5002]: I1203 16:48:02.037470 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:02 crc kubenswrapper[5002]: I1203 16:48:02.500873 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vlmzm"] Dec 03 16:48:02 crc kubenswrapper[5002]: I1203 16:48:02.656200 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" event={"ID":"30a62dfc-673c-4360-97a3-bb603b87aeab","Type":"ContainerStarted","Data":"3c09eae48548354a6a87022b7ac1b0dc40ce5cd6346ebe922e21bc7b88eabad9"} Dec 03 16:48:12 crc kubenswrapper[5002]: I1203 16:48:12.724570 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" event={"ID":"30a62dfc-673c-4360-97a3-bb603b87aeab","Type":"ContainerStarted","Data":"620c6a01c98dad484add5dcb313af9836e9fa107bc57bb4ddc56010c8ad370db"} Dec 03 16:48:12 crc kubenswrapper[5002]: I1203 16:48:12.725191 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:12 crc kubenswrapper[5002]: I1203 16:48:12.741436 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" podStartSLOduration=2.01988598 podStartE2EDuration="11.741420284s" podCreationTimestamp="2025-12-03 16:48:01 +0000 UTC" firstStartedPulling="2025-12-03 16:48:02.520458763 +0000 UTC m=+1005.934280651" lastFinishedPulling="2025-12-03 16:48:12.241993057 +0000 UTC m=+1015.655814955" observedRunningTime="2025-12-03 16:48:12.738624697 +0000 UTC m=+1016.152446585" watchObservedRunningTime="2025-12-03 16:48:12.741420284 +0000 UTC m=+1016.155242172" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.347737 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-7lvds"] Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.349620 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.353653 5002 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-vl7kw" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.360361 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-7lvds"] Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.490924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fd9abb89-d8f6-4d1b-9a15-e600de7edaed-bound-sa-token\") pod \"cert-manager-86cb77c54b-7lvds\" (UID: \"fd9abb89-d8f6-4d1b-9a15-e600de7edaed\") " pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.491099 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gx8k\" (UniqueName: \"kubernetes.io/projected/fd9abb89-d8f6-4d1b-9a15-e600de7edaed-kube-api-access-4gx8k\") pod \"cert-manager-86cb77c54b-7lvds\" (UID: \"fd9abb89-d8f6-4d1b-9a15-e600de7edaed\") " pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.592272 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gx8k\" (UniqueName: \"kubernetes.io/projected/fd9abb89-d8f6-4d1b-9a15-e600de7edaed-kube-api-access-4gx8k\") pod \"cert-manager-86cb77c54b-7lvds\" (UID: \"fd9abb89-d8f6-4d1b-9a15-e600de7edaed\") " pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.592365 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fd9abb89-d8f6-4d1b-9a15-e600de7edaed-bound-sa-token\") pod \"cert-manager-86cb77c54b-7lvds\" (UID: \"fd9abb89-d8f6-4d1b-9a15-e600de7edaed\") " pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.620402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fd9abb89-d8f6-4d1b-9a15-e600de7edaed-bound-sa-token\") pod \"cert-manager-86cb77c54b-7lvds\" (UID: \"fd9abb89-d8f6-4d1b-9a15-e600de7edaed\") " pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.621876 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gx8k\" (UniqueName: \"kubernetes.io/projected/fd9abb89-d8f6-4d1b-9a15-e600de7edaed-kube-api-access-4gx8k\") pod \"cert-manager-86cb77c54b-7lvds\" (UID: \"fd9abb89-d8f6-4d1b-9a15-e600de7edaed\") " pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.671059 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-7lvds" Dec 03 16:48:15 crc kubenswrapper[5002]: I1203 16:48:15.981195 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-7lvds"] Dec 03 16:48:16 crc kubenswrapper[5002]: I1203 16:48:16.761829 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-7lvds" event={"ID":"fd9abb89-d8f6-4d1b-9a15-e600de7edaed","Type":"ContainerStarted","Data":"a204c7b2f51c77e00a47c54c0103071b64eb71b42f7af1004a41b4951ca85f87"} Dec 03 16:48:16 crc kubenswrapper[5002]: I1203 16:48:16.762438 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-7lvds" event={"ID":"fd9abb89-d8f6-4d1b-9a15-e600de7edaed","Type":"ContainerStarted","Data":"f936417018b9cf5634b92e1fa361e536e1a70e7de34cdc342754004a34110177"} Dec 03 16:48:16 crc kubenswrapper[5002]: I1203 16:48:16.813565 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-7lvds" podStartSLOduration=1.81353667 podStartE2EDuration="1.81353667s" podCreationTimestamp="2025-12-03 16:48:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:48:16.785818024 +0000 UTC m=+1020.199639952" watchObservedRunningTime="2025-12-03 16:48:16.81353667 +0000 UTC m=+1020.227358578" Dec 03 16:48:17 crc kubenswrapper[5002]: I1203 16:48:17.040530 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-vlmzm" Dec 03 16:48:20 crc kubenswrapper[5002]: I1203 16:48:20.932016 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:48:20 crc kubenswrapper[5002]: I1203 16:48:20.932736 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:48:30 crc kubenswrapper[5002]: I1203 16:48:30.881691 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" event={"ID":"549ee4dc-b28a-48da-ad9e-4155cdfb67d5","Type":"ContainerStarted","Data":"7dd87449e77c12f1f389a9be46c80ca71ef4d2e8c3289868a427087a4b7e3e82"} Dec 03 16:48:30 crc kubenswrapper[5002]: I1203 16:48:30.902661 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sll6f" podStartSLOduration=1.45773866 podStartE2EDuration="32.902638409s" podCreationTimestamp="2025-12-03 16:47:58 +0000 UTC" firstStartedPulling="2025-12-03 16:47:58.709635487 +0000 UTC m=+1002.123457375" lastFinishedPulling="2025-12-03 16:48:30.154535236 +0000 UTC m=+1033.568357124" observedRunningTime="2025-12-03 16:48:30.89679467 +0000 UTC m=+1034.310616558" watchObservedRunningTime="2025-12-03 16:48:30.902638409 +0000 UTC m=+1034.316460297" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.531585 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-rxmzw"] Dec 03 16:48:35 crc 
kubenswrapper[5002]: I1203 16:48:35.536073 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.538606 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-cpn7n" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.538887 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.540628 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.551615 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rxmzw"] Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.653408 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d9cq\" (UniqueName: \"kubernetes.io/projected/0f77e943-3341-47b5-afb1-bcc740172d6f-kube-api-access-8d9cq\") pod \"openstack-operator-index-rxmzw\" (UID: \"0f77e943-3341-47b5-afb1-bcc740172d6f\") " pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.755172 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d9cq\" (UniqueName: \"kubernetes.io/projected/0f77e943-3341-47b5-afb1-bcc740172d6f-kube-api-access-8d9cq\") pod \"openstack-operator-index-rxmzw\" (UID: \"0f77e943-3341-47b5-afb1-bcc740172d6f\") " pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.774549 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d9cq\" (UniqueName: \"kubernetes.io/projected/0f77e943-3341-47b5-afb1-bcc740172d6f-kube-api-access-8d9cq\") pod \"openstack-operator-index-rxmzw\" (UID: \"0f77e943-3341-47b5-afb1-bcc740172d6f\") " pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:35 crc kubenswrapper[5002]: I1203 16:48:35.907782 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:36 crc kubenswrapper[5002]: I1203 16:48:36.100389 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rxmzw"] Dec 03 16:48:36 crc kubenswrapper[5002]: I1203 16:48:36.924393 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rxmzw" event={"ID":"0f77e943-3341-47b5-afb1-bcc740172d6f","Type":"ContainerStarted","Data":"cf800df354ce9f0ceec03c780392ac6a6379ede9e3e5922a059f62b4fb0e518c"} Dec 03 16:48:37 crc kubenswrapper[5002]: I1203 16:48:37.933359 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rxmzw" event={"ID":"0f77e943-3341-47b5-afb1-bcc740172d6f","Type":"ContainerStarted","Data":"22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca"} Dec 03 16:48:37 crc kubenswrapper[5002]: I1203 16:48:37.969237 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-rxmzw" podStartSLOduration=2.209533385 podStartE2EDuration="2.969206805s" podCreationTimestamp="2025-12-03 16:48:35 +0000 UTC" firstStartedPulling="2025-12-03 16:48:36.106554309 +0000 UTC m=+1039.520376197" lastFinishedPulling="2025-12-03 16:48:36.866227729 +0000 UTC m=+1040.280049617" observedRunningTime="2025-12-03 16:48:37.951199974 +0000 UTC m=+1041.365021862" watchObservedRunningTime="2025-12-03 16:48:37.969206805 +0000 UTC m=+1041.383028693" Dec 03 16:48:38 crc kubenswrapper[5002]: I1203 16:48:38.905112 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rxmzw"] Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.510707 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4j7h9"] Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.511735 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.536216 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4j7h9"] Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.714201 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6snq5\" (UniqueName: \"kubernetes.io/projected/de13f869-61a0-48df-817f-e75d1d405308-kube-api-access-6snq5\") pod \"openstack-operator-index-4j7h9\" (UID: \"de13f869-61a0-48df-817f-e75d1d405308\") " pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.815483 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6snq5\" (UniqueName: \"kubernetes.io/projected/de13f869-61a0-48df-817f-e75d1d405308-kube-api-access-6snq5\") pod \"openstack-operator-index-4j7h9\" (UID: \"de13f869-61a0-48df-817f-e75d1d405308\") " pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.834482 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6snq5\" (UniqueName: \"kubernetes.io/projected/de13f869-61a0-48df-817f-e75d1d405308-kube-api-access-6snq5\") pod \"openstack-operator-index-4j7h9\" (UID: \"de13f869-61a0-48df-817f-e75d1d405308\") " pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.840367 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:39 crc kubenswrapper[5002]: I1203 16:48:39.948149 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-rxmzw" podUID="0f77e943-3341-47b5-afb1-bcc740172d6f" containerName="registry-server" containerID="cri-o://22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca" gracePeriod=2 Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.043667 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4j7h9"] Dec 03 16:48:40 crc kubenswrapper[5002]: W1203 16:48:40.061234 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde13f869_61a0_48df_817f_e75d1d405308.slice/crio-869389173b99d537cc0d5653f23fc33a3d52a3861a9b71a0e5a7191337a84e35 WatchSource:0}: Error finding container 869389173b99d537cc0d5653f23fc33a3d52a3861a9b71a0e5a7191337a84e35: Status 404 returned error can't find the container with id 869389173b99d537cc0d5653f23fc33a3d52a3861a9b71a0e5a7191337a84e35 Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.260386 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.426424 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d9cq\" (UniqueName: \"kubernetes.io/projected/0f77e943-3341-47b5-afb1-bcc740172d6f-kube-api-access-8d9cq\") pod \"0f77e943-3341-47b5-afb1-bcc740172d6f\" (UID: \"0f77e943-3341-47b5-afb1-bcc740172d6f\") " Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.431630 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f77e943-3341-47b5-afb1-bcc740172d6f-kube-api-access-8d9cq" (OuterVolumeSpecName: "kube-api-access-8d9cq") pod "0f77e943-3341-47b5-afb1-bcc740172d6f" (UID: "0f77e943-3341-47b5-afb1-bcc740172d6f"). InnerVolumeSpecName "kube-api-access-8d9cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.527974 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d9cq\" (UniqueName: \"kubernetes.io/projected/0f77e943-3341-47b5-afb1-bcc740172d6f-kube-api-access-8d9cq\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.956367 5002 generic.go:334] "Generic (PLEG): container finished" podID="0f77e943-3341-47b5-afb1-bcc740172d6f" containerID="22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca" exitCode=0 Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.956479 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rxmzw" event={"ID":"0f77e943-3341-47b5-afb1-bcc740172d6f","Type":"ContainerDied","Data":"22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca"} Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.956519 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rxmzw" event={"ID":"0f77e943-3341-47b5-afb1-bcc740172d6f","Type":"ContainerDied","Data":"cf800df354ce9f0ceec03c780392ac6a6379ede9e3e5922a059f62b4fb0e518c"} Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.956538 5002 scope.go:117] "RemoveContainer" containerID="22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca" Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.956663 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rxmzw" Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.963945 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4j7h9" event={"ID":"de13f869-61a0-48df-817f-e75d1d405308","Type":"ContainerStarted","Data":"869389173b99d537cc0d5653f23fc33a3d52a3861a9b71a0e5a7191337a84e35"} Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.980061 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rxmzw"] Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.986045 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-rxmzw"] Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.990664 5002 scope.go:117] "RemoveContainer" containerID="22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca" Dec 03 16:48:40 crc kubenswrapper[5002]: E1203 16:48:40.991205 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca\": container with ID starting with 22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca not found: ID does not exist" containerID="22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca" Dec 03 16:48:40 crc kubenswrapper[5002]: I1203 16:48:40.991241 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca"} err="failed to get container status \"22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca\": rpc error: code = NotFound desc = could not find container \"22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca\": container with ID starting with 22ec258ce4f6766813aa9a2b1b4ccb7f354e78fde86d83e193652b0d0647b3ca not found: ID does not exist" Dec 03 16:48:42 crc kubenswrapper[5002]: I1203 16:48:42.854855 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f77e943-3341-47b5-afb1-bcc740172d6f" path="/var/lib/kubelet/pods/0f77e943-3341-47b5-afb1-bcc740172d6f/volumes" Dec 03 16:48:45 crc kubenswrapper[5002]: I1203 16:48:45.000275 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4j7h9" event={"ID":"de13f869-61a0-48df-817f-e75d1d405308","Type":"ContainerStarted","Data":"dbbb5f278b83e8bbd73595358d180f0cac918bec8fd472caf3e01984da4fed21"} Dec 03 16:48:45 crc kubenswrapper[5002]: I1203 16:48:45.022313 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-4j7h9" podStartSLOduration=2.022128672 podStartE2EDuration="6.022282604s" podCreationTimestamp="2025-12-03 16:48:39 +0000 UTC" firstStartedPulling="2025-12-03 16:48:40.065280881 +0000 UTC m=+1043.479102769" lastFinishedPulling="2025-12-03 16:48:44.065434783 +0000 UTC m=+1047.479256701" observedRunningTime="2025-12-03 16:48:45.016422473 +0000 UTC m=+1048.430244371" watchObservedRunningTime="2025-12-03 16:48:45.022282604 +0000 UTC m=+1048.436104512" Dec 03 16:48:49 crc kubenswrapper[5002]: I1203 16:48:49.840999 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:49 crc kubenswrapper[5002]: I1203 16:48:49.841325 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:49 crc kubenswrapper[5002]: I1203 16:48:49.879373 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:50 crc kubenswrapper[5002]: I1203 16:48:50.077158 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-4j7h9" Dec 03 16:48:50 crc kubenswrapper[5002]: I1203 16:48:50.917623 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:48:50 crc kubenswrapper[5002]: I1203 16:48:50.918095 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:48:50 crc kubenswrapper[5002]: I1203 16:48:50.918176 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:48:50 crc kubenswrapper[5002]: I1203 16:48:50.919074 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"73ac542ac5ae95737fc5bd0085cb65082e08deae3560c2f23506ea5bddf84026"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:48:50 crc kubenswrapper[5002]: I1203 16:48:50.919154 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://73ac542ac5ae95737fc5bd0085cb65082e08deae3560c2f23506ea5bddf84026" gracePeriod=600 Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.156600 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49"] Dec 03 16:48:51 crc kubenswrapper[5002]: E1203 16:48:51.157504 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f77e943-3341-47b5-afb1-bcc740172d6f" containerName="registry-server" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.157525 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f77e943-3341-47b5-afb1-bcc740172d6f" containerName="registry-server" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.157834 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f77e943-3341-47b5-afb1-bcc740172d6f" containerName="registry-server" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.162063 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.165060 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x245h" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.176457 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49"] Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.196984 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-util\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.197029 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-bundle\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.197057 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsf44\" (UniqueName: \"kubernetes.io/projected/6f94aeac-86c4-4982-a520-1748553b83ac-kube-api-access-gsf44\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.298280 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-util\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.298601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-bundle\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.298694 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsf44\" (UniqueName: \"kubernetes.io/projected/6f94aeac-86c4-4982-a520-1748553b83ac-kube-api-access-gsf44\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.299048 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-util\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.299852 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-bundle\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.316987 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsf44\" (UniqueName: \"kubernetes.io/projected/6f94aeac-86c4-4982-a520-1748553b83ac-kube-api-access-gsf44\") pod \"98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.480161 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:51 crc kubenswrapper[5002]: I1203 16:48:51.876724 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49"] Dec 03 16:48:51 crc kubenswrapper[5002]: W1203 16:48:51.882635 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f94aeac_86c4_4982_a520_1748553b83ac.slice/crio-8ae63091ea1e4e68c9e6c55be31b56f2b126b2e4d78897b19751169522843102 WatchSource:0}: Error finding container 8ae63091ea1e4e68c9e6c55be31b56f2b126b2e4d78897b19751169522843102: Status 404 returned error can't find the container with id 8ae63091ea1e4e68c9e6c55be31b56f2b126b2e4d78897b19751169522843102 Dec 03 16:48:52 crc kubenswrapper[5002]: I1203 16:48:52.049501 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" event={"ID":"6f94aeac-86c4-4982-a520-1748553b83ac","Type":"ContainerStarted","Data":"160f1fb4c97ce46d489d5bdaf0d40961864ae64fb0362daaae4d343bf8799ea0"} Dec 03 16:48:52 crc kubenswrapper[5002]: I1203 16:48:52.049920 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" event={"ID":"6f94aeac-86c4-4982-a520-1748553b83ac","Type":"ContainerStarted","Data":"8ae63091ea1e4e68c9e6c55be31b56f2b126b2e4d78897b19751169522843102"} Dec 03 16:48:52 crc kubenswrapper[5002]: I1203 16:48:52.053789 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="73ac542ac5ae95737fc5bd0085cb65082e08deae3560c2f23506ea5bddf84026" exitCode=0 Dec 03 16:48:52 crc kubenswrapper[5002]: I1203 16:48:52.053853 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"73ac542ac5ae95737fc5bd0085cb65082e08deae3560c2f23506ea5bddf84026"} Dec 03 16:48:52 crc kubenswrapper[5002]: I1203 16:48:52.053962 
5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"80bed6e41a955a593b74ebe3d33480022d2c94cec0b0862556f213fb12fa5abe"} Dec 03 16:48:52 crc kubenswrapper[5002]: I1203 16:48:52.053991 5002 scope.go:117] "RemoveContainer" containerID="a989eba6e883743beeaf62d8ab7a87b75096a5c8d56e61d5262eda90b8b04b66" Dec 03 16:48:53 crc kubenswrapper[5002]: I1203 16:48:53.066702 5002 generic.go:334] "Generic (PLEG): container finished" podID="6f94aeac-86c4-4982-a520-1748553b83ac" containerID="160f1fb4c97ce46d489d5bdaf0d40961864ae64fb0362daaae4d343bf8799ea0" exitCode=0 Dec 03 16:48:53 crc kubenswrapper[5002]: I1203 16:48:53.066861 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" event={"ID":"6f94aeac-86c4-4982-a520-1748553b83ac","Type":"ContainerDied","Data":"160f1fb4c97ce46d489d5bdaf0d40961864ae64fb0362daaae4d343bf8799ea0"} Dec 03 16:48:54 crc kubenswrapper[5002]: I1203 16:48:54.074255 5002 generic.go:334] "Generic (PLEG): container finished" podID="6f94aeac-86c4-4982-a520-1748553b83ac" containerID="57b5cb56fb36f25ae1b917186c0a79ccd980b5f30c21d2439d645a5f469063fc" exitCode=0 Dec 03 16:48:54 crc kubenswrapper[5002]: I1203 16:48:54.074301 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" event={"ID":"6f94aeac-86c4-4982-a520-1748553b83ac","Type":"ContainerDied","Data":"57b5cb56fb36f25ae1b917186c0a79ccd980b5f30c21d2439d645a5f469063fc"} Dec 03 16:48:55 crc kubenswrapper[5002]: I1203 16:48:55.083906 5002 generic.go:334] "Generic (PLEG): container finished" podID="6f94aeac-86c4-4982-a520-1748553b83ac" containerID="78b290e0bb0bd15a4a636c5e23557dad962fbc3d9aba1513911ea67bbb424838" exitCode=0 Dec 03 16:48:55 crc kubenswrapper[5002]: I1203 16:48:55.083982 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" event={"ID":"6f94aeac-86c4-4982-a520-1748553b83ac","Type":"ContainerDied","Data":"78b290e0bb0bd15a4a636c5e23557dad962fbc3d9aba1513911ea67bbb424838"} Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.386790 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.484880 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-bundle\") pod \"6f94aeac-86c4-4982-a520-1748553b83ac\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.485095 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsf44\" (UniqueName: \"kubernetes.io/projected/6f94aeac-86c4-4982-a520-1748553b83ac-kube-api-access-gsf44\") pod \"6f94aeac-86c4-4982-a520-1748553b83ac\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.485127 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-util\") pod \"6f94aeac-86c4-4982-a520-1748553b83ac\" (UID: \"6f94aeac-86c4-4982-a520-1748553b83ac\") " Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.485877 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-bundle" (OuterVolumeSpecName: "bundle") pod "6f94aeac-86c4-4982-a520-1748553b83ac" (UID: "6f94aeac-86c4-4982-a520-1748553b83ac"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.491951 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f94aeac-86c4-4982-a520-1748553b83ac-kube-api-access-gsf44" (OuterVolumeSpecName: "kube-api-access-gsf44") pod "6f94aeac-86c4-4982-a520-1748553b83ac" (UID: "6f94aeac-86c4-4982-a520-1748553b83ac"). InnerVolumeSpecName "kube-api-access-gsf44". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.503344 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-util" (OuterVolumeSpecName: "util") pod "6f94aeac-86c4-4982-a520-1748553b83ac" (UID: "6f94aeac-86c4-4982-a520-1748553b83ac"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.586731 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.586797 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsf44\" (UniqueName: \"kubernetes.io/projected/6f94aeac-86c4-4982-a520-1748553b83ac-kube-api-access-gsf44\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:56 crc kubenswrapper[5002]: I1203 16:48:56.586809 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6f94aeac-86c4-4982-a520-1748553b83ac-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:57 crc kubenswrapper[5002]: I1203 16:48:57.104662 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" event={"ID":"6f94aeac-86c4-4982-a520-1748553b83ac","Type":"ContainerDied","Data":"8ae63091ea1e4e68c9e6c55be31b56f2b126b2e4d78897b19751169522843102"} Dec 03 16:48:57 crc kubenswrapper[5002]: I1203 16:48:57.104729 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ae63091ea1e4e68c9e6c55be31b56f2b126b2e4d78897b19751169522843102" Dec 03 16:48:57 crc kubenswrapper[5002]: I1203 16:48:57.104879 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.941942 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n"] Dec 03 16:48:58 crc kubenswrapper[5002]: E1203 16:48:58.942717 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="util" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.942734 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="util" Dec 03 16:48:58 crc kubenswrapper[5002]: E1203 16:48:58.942787 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="extract" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.942795 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="extract" Dec 03 16:48:58 crc kubenswrapper[5002]: E1203 16:48:58.942824 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="pull" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.942832 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="pull" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.943064 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f94aeac-86c4-4982-a520-1748553b83ac" containerName="extract" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.943699 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.945737 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-zzk7g" Dec 03 16:48:58 crc kubenswrapper[5002]: I1203 16:48:58.968178 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n"] Dec 03 16:48:59 crc kubenswrapper[5002]: I1203 16:48:59.023609 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qttrv\" (UniqueName: \"kubernetes.io/projected/b7076603-1553-462f-ad95-bbb4b423a1cc-kube-api-access-qttrv\") pod \"openstack-operator-controller-operator-7dd5c7bb7c-g9r6n\" (UID: \"b7076603-1553-462f-ad95-bbb4b423a1cc\") " pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:48:59 crc kubenswrapper[5002]: I1203 16:48:59.124941 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qttrv\" (UniqueName: \"kubernetes.io/projected/b7076603-1553-462f-ad95-bbb4b423a1cc-kube-api-access-qttrv\") pod \"openstack-operator-controller-operator-7dd5c7bb7c-g9r6n\" (UID: \"b7076603-1553-462f-ad95-bbb4b423a1cc\") " pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:48:59 crc kubenswrapper[5002]: I1203 16:48:59.150072 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qttrv\" (UniqueName: \"kubernetes.io/projected/b7076603-1553-462f-ad95-bbb4b423a1cc-kube-api-access-qttrv\") pod \"openstack-operator-controller-operator-7dd5c7bb7c-g9r6n\" (UID: \"b7076603-1553-462f-ad95-bbb4b423a1cc\") " pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:48:59 crc kubenswrapper[5002]: I1203 16:48:59.261809 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:48:59 crc kubenswrapper[5002]: I1203 16:48:59.517202 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n"] Dec 03 16:49:00 crc kubenswrapper[5002]: I1203 16:49:00.124435 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" event={"ID":"b7076603-1553-462f-ad95-bbb4b423a1cc","Type":"ContainerStarted","Data":"a3ecc7c9fe3017d00afae59103a78a59de61044769276fe4fc951234f45dceab"} Dec 03 16:49:04 crc kubenswrapper[5002]: I1203 16:49:04.150711 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" event={"ID":"b7076603-1553-462f-ad95-bbb4b423a1cc","Type":"ContainerStarted","Data":"ba014f341537fab700a0312611f19ac2a44abc8805bb5bb4cc5ae69a32ecdfd6"} Dec 03 16:49:04 crc kubenswrapper[5002]: I1203 16:49:04.151685 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:49:04 crc kubenswrapper[5002]: I1203 16:49:04.194391 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" podStartSLOduration=2.178764068 podStartE2EDuration="6.194362262s" podCreationTimestamp="2025-12-03 16:48:58 +0000 UTC" firstStartedPulling="2025-12-03 16:48:59.52471043 +0000 UTC m=+1062.938532318" lastFinishedPulling="2025-12-03 16:49:03.540308624 +0000 UTC m=+1066.954130512" observedRunningTime="2025-12-03 16:49:04.190734493 +0000 UTC m=+1067.604556401" watchObservedRunningTime="2025-12-03 16:49:04.194362262 +0000 UTC m=+1067.608184170" Dec 03 16:49:09 crc kubenswrapper[5002]: I1203 16:49:09.268022 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7dd5c7bb7c-g9r6n" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.697108 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.698863 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.704827 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-8mrj9" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.711820 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.713328 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.715200 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-4cdxj" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.722868 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.724071 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.726629 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-jdvmx" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.754529 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.756164 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.759673 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-lmk82" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.760310 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.773086 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.791562 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.807537 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4crmj\" (UniqueName: \"kubernetes.io/projected/fbb180bd-c957-4362-a7ac-04065940a34e-kube-api-access-4crmj\") pod \"barbican-operator-controller-manager-7d9dfd778-bp8bt\" (UID: \"fbb180bd-c957-4362-a7ac-04065940a34e\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.815792 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.822981 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.826797 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.833217 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-w668d" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.846095 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.875778 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.877146 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.881294 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-mhc9w" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.891166 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.892364 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.896437 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-q6hx4" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.896601 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.908918 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914061 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914128 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z58n\" (UniqueName: \"kubernetes.io/projected/c23347dc-e104-4ca5-a132-60a102150117-kube-api-access-7z58n\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914186 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ljzk\" (UniqueName: \"kubernetes.io/projected/af132426-0104-4abf-bad5-67615b919af7-kube-api-access-2ljzk\") pod \"horizon-operator-controller-manager-68c6d99b8f-b2xhv\" (UID: \"af132426-0104-4abf-bad5-67615b919af7\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:49:27 crc kubenswrapper[5002]: 
I1203 16:49:27.914245 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5lr6\" (UniqueName: \"kubernetes.io/projected/a25d04de-e230-4750-a4c9-6a43bf344b9e-kube-api-access-d5lr6\") pod \"glance-operator-controller-manager-77987cd8cd-s6d9w\" (UID: \"a25d04de-e230-4750-a4c9-6a43bf344b9e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914264 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4crmj\" (UniqueName: \"kubernetes.io/projected/fbb180bd-c957-4362-a7ac-04065940a34e-kube-api-access-4crmj\") pod \"barbican-operator-controller-manager-7d9dfd778-bp8bt\" (UID: \"fbb180bd-c957-4362-a7ac-04065940a34e\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqz99\" (UniqueName: \"kubernetes.io/projected/f49045c6-275a-46b4-8f61-9fd85401869f-kube-api-access-qqz99\") pod \"heat-operator-controller-manager-5f64f6f8bb-rlh8w\" (UID: \"f49045c6-275a-46b4-8f61-9fd85401869f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914343 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bhfv\" (UniqueName: \"kubernetes.io/projected/7bb9718d-9129-421f-8f7e-8b5c8d7d9e53-kube-api-access-7bhfv\") pod \"designate-operator-controller-manager-78b4bc895b-w5vlv\" (UID: \"7bb9718d-9129-421f-8f7e-8b5c8d7d9e53\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.914360 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmkbr\" (UniqueName: \"kubernetes.io/projected/240013fb-5ef1-4923-8c16-3967ff1a47e8-kube-api-access-bmkbr\") pod \"cinder-operator-controller-manager-859b6ccc6-tjcc7\" (UID: \"240013fb-5ef1-4923-8c16-3967ff1a47e8\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.934913 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.948570 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6"] Dec 03 16:49:27 crc kubenswrapper[5002]: I1203 16:49:27.982007 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.017647 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-h4755" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.035670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z58n\" (UniqueName: \"kubernetes.io/projected/c23347dc-e104-4ca5-a132-60a102150117-kube-api-access-7z58n\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.036153 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4crmj\" (UniqueName: \"kubernetes.io/projected/fbb180bd-c957-4362-a7ac-04065940a34e-kube-api-access-4crmj\") pod \"barbican-operator-controller-manager-7d9dfd778-bp8bt\" (UID: \"fbb180bd-c957-4362-a7ac-04065940a34e\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.036271 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ljzk\" (UniqueName: \"kubernetes.io/projected/af132426-0104-4abf-bad5-67615b919af7-kube-api-access-2ljzk\") pod \"horizon-operator-controller-manager-68c6d99b8f-b2xhv\" (UID: \"af132426-0104-4abf-bad5-67615b919af7\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.036374 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5lr6\" (UniqueName: \"kubernetes.io/projected/a25d04de-e230-4750-a4c9-6a43bf344b9e-kube-api-access-d5lr6\") pod \"glance-operator-controller-manager-77987cd8cd-s6d9w\" (UID: \"a25d04de-e230-4750-a4c9-6a43bf344b9e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.036483 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqz99\" (UniqueName: \"kubernetes.io/projected/f49045c6-275a-46b4-8f61-9fd85401869f-kube-api-access-qqz99\") pod \"heat-operator-controller-manager-5f64f6f8bb-rlh8w\" (UID: \"f49045c6-275a-46b4-8f61-9fd85401869f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.036509 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bhfv\" (UniqueName: \"kubernetes.io/projected/7bb9718d-9129-421f-8f7e-8b5c8d7d9e53-kube-api-access-7bhfv\") pod \"designate-operator-controller-manager-78b4bc895b-w5vlv\" (UID: \"7bb9718d-9129-421f-8f7e-8b5c8d7d9e53\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.036532 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmkbr\" (UniqueName: \"kubernetes.io/projected/240013fb-5ef1-4923-8c16-3967ff1a47e8-kube-api-access-bmkbr\") pod \"cinder-operator-controller-manager-859b6ccc6-tjcc7\" (UID: \"240013fb-5ef1-4923-8c16-3967ff1a47e8\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:49:28 crc 
kubenswrapper[5002]: I1203 16:49:28.036641 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.036810 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.036868 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert podName:c23347dc-e104-4ca5-a132-60a102150117 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:28.536844975 +0000 UTC m=+1091.950666863 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert") pod "infra-operator-controller-manager-57548d458d-95x4f" (UID: "c23347dc-e104-4ca5-a132-60a102150117") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.045033 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.060992 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.062613 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.083086 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-9gg6b" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.092599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bhfv\" (UniqueName: \"kubernetes.io/projected/7bb9718d-9129-421f-8f7e-8b5c8d7d9e53-kube-api-access-7bhfv\") pod \"designate-operator-controller-manager-78b4bc895b-w5vlv\" (UID: \"7bb9718d-9129-421f-8f7e-8b5c8d7d9e53\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.096488 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z58n\" (UniqueName: \"kubernetes.io/projected/c23347dc-e104-4ca5-a132-60a102150117-kube-api-access-7z58n\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.096584 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.108465 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmkbr\" (UniqueName: \"kubernetes.io/projected/240013fb-5ef1-4923-8c16-3967ff1a47e8-kube-api-access-bmkbr\") pod \"cinder-operator-controller-manager-859b6ccc6-tjcc7\" (UID: \"240013fb-5ef1-4923-8c16-3967ff1a47e8\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.111372 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ljzk\" (UniqueName: \"kubernetes.io/projected/af132426-0104-4abf-bad5-67615b919af7-kube-api-access-2ljzk\") pod \"horizon-operator-controller-manager-68c6d99b8f-b2xhv\" (UID: \"af132426-0104-4abf-bad5-67615b919af7\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.111581 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5lr6\" (UniqueName: \"kubernetes.io/projected/a25d04de-e230-4750-a4c9-6a43bf344b9e-kube-api-access-d5lr6\") pod \"glance-operator-controller-manager-77987cd8cd-s6d9w\" (UID: \"a25d04de-e230-4750-a4c9-6a43bf344b9e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.112382 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqz99\" (UniqueName: \"kubernetes.io/projected/f49045c6-275a-46b4-8f61-9fd85401869f-kube-api-access-qqz99\") pod \"heat-operator-controller-manager-5f64f6f8bb-rlh8w\" (UID: \"f49045c6-275a-46b4-8f61-9fd85401869f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.115968 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.117467 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.130684 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-tr4jv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.149863 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vccp\" (UniqueName: \"kubernetes.io/projected/a54f31fa-c7c6-48b0-90b9-df614b116b8c-kube-api-access-9vccp\") pod \"ironic-operator-controller-manager-6c548fd776-8hlz6\" (UID: \"a54f31fa-c7c6-48b0-90b9-df614b116b8c\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.164126 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.192418 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.194139 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.196325 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-qqcjl" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.218807 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.230109 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.234881 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.239837 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.242649 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.247546 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-mpjgp" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.252422 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.252508 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vccp\" (UniqueName: \"kubernetes.io/projected/a54f31fa-c7c6-48b0-90b9-df614b116b8c-kube-api-access-9vccp\") pod \"ironic-operator-controller-manager-6c548fd776-8hlz6\" (UID: \"a54f31fa-c7c6-48b0-90b9-df614b116b8c\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.252607 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8wz7\" (UniqueName: \"kubernetes.io/projected/ec47f682-ce44-4e37-980b-a5e1c1142284-kube-api-access-b8wz7\") pod \"manila-operator-controller-manager-7c79b5df47-7hcw5\" (UID: \"ec47f682-ce44-4e37-980b-a5e1c1142284\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.252741 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkvvj\" (UniqueName: \"kubernetes.io/projected/4b55dbea-68c7-4290-a698-068c741b22a6-kube-api-access-mkvvj\") pod \"keystone-operator-controller-manager-7765d96ddf-bfqzl\" (UID: \"4b55dbea-68c7-4290-a698-068c741b22a6\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.257864 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.258033 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.259658 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-8mtlj" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.260997 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.265714 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.271470 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.273709 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.279624 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-vbc8z" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.282648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vccp\" (UniqueName: \"kubernetes.io/projected/a54f31fa-c7c6-48b0-90b9-df614b116b8c-kube-api-access-9vccp\") pod \"ironic-operator-controller-manager-6c548fd776-8hlz6\" (UID: \"a54f31fa-c7c6-48b0-90b9-df614b116b8c\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.284768 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.293549 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.295414 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.298996 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-6hhgp" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.299243 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.321826 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.323245 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.330195 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-s9wq9" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.340289 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.343642 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.350681 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-9bzsg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.359910 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.362104 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mhjn\" (UniqueName: \"kubernetes.io/projected/43a120e7-09d2-4d8e-8acd-06b6ab38cc40-kube-api-access-8mhjn\") pod \"nova-operator-controller-manager-697bc559fc-ngjzc\" (UID: \"43a120e7-09d2-4d8e-8acd-06b6ab38cc40\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.362532 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8wz7\" (UniqueName: \"kubernetes.io/projected/ec47f682-ce44-4e37-980b-a5e1c1142284-kube-api-access-b8wz7\") pod \"manila-operator-controller-manager-7c79b5df47-7hcw5\" (UID: \"ec47f682-ce44-4e37-980b-a5e1c1142284\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.362587 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5snfm\" (UniqueName: \"kubernetes.io/projected/e4af56b3-88df-4902-b51a-1ed81ced8583-kube-api-access-5snfm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rm2fw\" (UID: \"e4af56b3-88df-4902-b51a-1ed81ced8583\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.362625 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ntzv\" (UniqueName: \"kubernetes.io/projected/5ef47d10-5c5b-4cd6-b194-753a9fcc1b54-kube-api-access-5ntzv\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-gp7hz\" (UID: \"5ef47d10-5c5b-4cd6-b194-753a9fcc1b54\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.362670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkvvj\" (UniqueName: \"kubernetes.io/projected/4b55dbea-68c7-4290-a698-068c741b22a6-kube-api-access-mkvvj\") pod \"keystone-operator-controller-manager-7765d96ddf-bfqzl\" (UID: \"4b55dbea-68c7-4290-a698-068c741b22a6\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.378071 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.384157 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.390894 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8wz7\" (UniqueName: \"kubernetes.io/projected/ec47f682-ce44-4e37-980b-a5e1c1142284-kube-api-access-b8wz7\") pod \"manila-operator-controller-manager-7c79b5df47-7hcw5\" (UID: \"ec47f682-ce44-4e37-980b-a5e1c1142284\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.395573 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.397114 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.404356 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-56sk4" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.405958 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.409722 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.422624 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.434150 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.441979 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.443445 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.446933 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-7vv59" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463579 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5snfm\" (UniqueName: \"kubernetes.io/projected/e4af56b3-88df-4902-b51a-1ed81ced8583-kube-api-access-5snfm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rm2fw\" (UID: \"e4af56b3-88df-4902-b51a-1ed81ced8583\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463657 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c8p6\" (UniqueName: \"kubernetes.io/projected/0e786251-b94d-47b4-930f-5f2cac19cc52-kube-api-access-8c8p6\") pod \"octavia-operator-controller-manager-998648c74-cj9f5\" (UID: \"0e786251-b94d-47b4-930f-5f2cac19cc52\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ntzv\" (UniqueName: \"kubernetes.io/projected/5ef47d10-5c5b-4cd6-b194-753a9fcc1b54-kube-api-access-5ntzv\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-gp7hz\" (UID: \"5ef47d10-5c5b-4cd6-b194-753a9fcc1b54\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tj82\" (UniqueName: \"kubernetes.io/projected/127b2eae-6b24-45a7-871b-e9569b062e28-kube-api-access-2tj82\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463780 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzk2g\" (UniqueName: \"kubernetes.io/projected/b098b672-2320-4ba6-bd50-2237cb576d80-kube-api-access-xzk2g\") pod \"placement-operator-controller-manager-78f8948974-lfdk7\" (UID: \"b098b672-2320-4ba6-bd50-2237cb576d80\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkvvj\" (UniqueName: \"kubernetes.io/projected/4b55dbea-68c7-4290-a698-068c741b22a6-kube-api-access-mkvvj\") pod \"keystone-operator-controller-manager-7765d96ddf-bfqzl\" (UID: \"4b55dbea-68c7-4290-a698-068c741b22a6\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463815 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qsxc\" (UniqueName: \"kubernetes.io/projected/cd874f14-4eb0-46ae-8968-4a52e4a3bc69-kube-api-access-4qsxc\") pod \"ovn-operator-controller-manager-b6456fdb6-bwfpx\" (UID: \"cd874f14-4eb0-46ae-8968-4a52e4a3bc69\") " 
pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.463939 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.464141 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mhjn\" (UniqueName: \"kubernetes.io/projected/43a120e7-09d2-4d8e-8acd-06b6ab38cc40-kube-api-access-8mhjn\") pod \"nova-operator-controller-manager-697bc559fc-ngjzc\" (UID: \"43a120e7-09d2-4d8e-8acd-06b6ab38cc40\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.472818 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.483221 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.488208 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.491634 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.493027 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.498014 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-fxj4r" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.498245 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.522505 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ntzv\" (UniqueName: \"kubernetes.io/projected/5ef47d10-5c5b-4cd6-b194-753a9fcc1b54-kube-api-access-5ntzv\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-gp7hz\" (UID: \"5ef47d10-5c5b-4cd6-b194-753a9fcc1b54\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.527589 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5snfm\" (UniqueName: \"kubernetes.io/projected/e4af56b3-88df-4902-b51a-1ed81ced8583-kube-api-access-5snfm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rm2fw\" (UID: \"e4af56b3-88df-4902-b51a-1ed81ced8583\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.542845 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mhjn\" (UniqueName: \"kubernetes.io/projected/43a120e7-09d2-4d8e-8acd-06b6ab38cc40-kube-api-access-8mhjn\") pod \"nova-operator-controller-manager-697bc559fc-ngjzc\" (UID: \"43a120e7-09d2-4d8e-8acd-06b6ab38cc40\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567358 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shlcw\" (UniqueName: \"kubernetes.io/projected/395c93a8-4649-4ddf-b630-d9982c670991-kube-api-access-shlcw\") pod \"telemetry-operator-controller-manager-76cc84c6bb-4rqsk\" (UID: \"395c93a8-4649-4ddf-b630-d9982c670991\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567446 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzk2g\" (UniqueName: \"kubernetes.io/projected/b098b672-2320-4ba6-bd50-2237cb576d80-kube-api-access-xzk2g\") pod \"placement-operator-controller-manager-78f8948974-lfdk7\" (UID: \"b098b672-2320-4ba6-bd50-2237cb576d80\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567479 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tj82\" (UniqueName: \"kubernetes.io/projected/127b2eae-6b24-45a7-871b-e9569b062e28-kube-api-access-2tj82\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567515 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qsxc\" (UniqueName: \"kubernetes.io/projected/cd874f14-4eb0-46ae-8968-4a52e4a3bc69-kube-api-access-4qsxc\") 
pod \"ovn-operator-controller-manager-b6456fdb6-bwfpx\" (UID: \"cd874f14-4eb0-46ae-8968-4a52e4a3bc69\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567541 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567578 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bp6g\" (UniqueName: \"kubernetes.io/projected/97caeb4d-2742-41fb-ac4f-440555163d81-kube-api-access-6bp6g\") pod \"swift-operator-controller-manager-5f8c65bbfc-jbfdk\" (UID: \"97caeb4d-2742-41fb-ac4f-440555163d81\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567632 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.567663 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c8p6\" (UniqueName: \"kubernetes.io/projected/0e786251-b94d-47b4-930f-5f2cac19cc52-kube-api-access-8c8p6\") pod \"octavia-operator-controller-manager-998648c74-cj9f5\" (UID: \"0e786251-b94d-47b4-930f-5f2cac19cc52\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.569051 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.569108 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert podName:127b2eae-6b24-45a7-871b-e9569b062e28 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:29.069088768 +0000 UTC m=+1092.482910656 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686z66jk" (UID: "127b2eae-6b24-45a7-871b-e9569b062e28") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.569388 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.569442 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert podName:c23347dc-e104-4ca5-a132-60a102150117 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:29.569406668 +0000 UTC m=+1092.983228546 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert") pod "infra-operator-controller-manager-57548d458d-95x4f" (UID: "c23347dc-e104-4ca5-a132-60a102150117") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.582269 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.593997 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.602699 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzk2g\" (UniqueName: \"kubernetes.io/projected/b098b672-2320-4ba6-bd50-2237cb576d80-kube-api-access-xzk2g\") pod \"placement-operator-controller-manager-78f8948974-lfdk7\" (UID: \"b098b672-2320-4ba6-bd50-2237cb576d80\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.606151 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qsxc\" (UniqueName: \"kubernetes.io/projected/cd874f14-4eb0-46ae-8968-4a52e4a3bc69-kube-api-access-4qsxc\") pod \"ovn-operator-controller-manager-b6456fdb6-bwfpx\" (UID: \"cd874f14-4eb0-46ae-8968-4a52e4a3bc69\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.607424 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tj82\" (UniqueName: \"kubernetes.io/projected/127b2eae-6b24-45a7-871b-e9569b062e28-kube-api-access-2tj82\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.608104 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c8p6\" (UniqueName: \"kubernetes.io/projected/0e786251-b94d-47b4-930f-5f2cac19cc52-kube-api-access-8c8p6\") pod \"octavia-operator-controller-manager-998648c74-cj9f5\" (UID: \"0e786251-b94d-47b4-930f-5f2cac19cc52\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.619272 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.640163 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.642201 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.649912 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-r2nh4" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.651034 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.659828 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.662290 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.669222 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shlcw\" (UniqueName: \"kubernetes.io/projected/395c93a8-4649-4ddf-b630-d9982c670991-kube-api-access-shlcw\") pod \"telemetry-operator-controller-manager-76cc84c6bb-4rqsk\" (UID: \"395c93a8-4649-4ddf-b630-d9982c670991\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.670326 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8tbr\" (UniqueName: \"kubernetes.io/projected/6fc5b30e-4458-4d0e-b476-107c4f92f56a-kube-api-access-p8tbr\") pod \"test-operator-controller-manager-5854674fcc-w2vkv\" (UID: \"6fc5b30e-4458-4d0e-b476-107c4f92f56a\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.672188 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bp6g\" (UniqueName: \"kubernetes.io/projected/97caeb4d-2742-41fb-ac4f-440555163d81-kube-api-access-6bp6g\") pod \"swift-operator-controller-manager-5f8c65bbfc-jbfdk\" (UID: \"97caeb4d-2742-41fb-ac4f-440555163d81\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.704032 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shlcw\" (UniqueName: \"kubernetes.io/projected/395c93a8-4649-4ddf-b630-d9982c670991-kube-api-access-shlcw\") pod \"telemetry-operator-controller-manager-76cc84c6bb-4rqsk\" (UID: \"395c93a8-4649-4ddf-b630-d9982c670991\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.720958 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bp6g\" (UniqueName: \"kubernetes.io/projected/97caeb4d-2742-41fb-ac4f-440555163d81-kube-api-access-6bp6g\") pod \"swift-operator-controller-manager-5f8c65bbfc-jbfdk\" (UID: \"97caeb4d-2742-41fb-ac4f-440555163d81\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.776913 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jjx2\" (UniqueName: \"kubernetes.io/projected/4ddf28f1-4702-477d-b1d8-4f6758b4dc9a-kube-api-access-4jjx2\") pod \"watcher-operator-controller-manager-769dc69bc-8jrq7\" (UID: \"4ddf28f1-4702-477d-b1d8-4f6758b4dc9a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.777239 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8tbr\" (UniqueName: 
\"kubernetes.io/projected/6fc5b30e-4458-4d0e-b476-107c4f92f56a-kube-api-access-p8tbr\") pod \"test-operator-controller-manager-5854674fcc-w2vkv\" (UID: \"6fc5b30e-4458-4d0e-b476-107c4f92f56a\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.778231 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.779384 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.787060 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.787273 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-8b8cq" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.796416 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.804834 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.816561 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8tbr\" (UniqueName: \"kubernetes.io/projected/6fc5b30e-4458-4d0e-b476-107c4f92f56a-kube-api-access-p8tbr\") pod \"test-operator-controller-manager-5854674fcc-w2vkv\" (UID: \"6fc5b30e-4458-4d0e-b476-107c4f92f56a\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.835227 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.837127 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.841520 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-5stfv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.871854 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.878562 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.878636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.878675 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c88nj\" (UniqueName: \"kubernetes.io/projected/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-kube-api-access-c88nj\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.878718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jjx2\" (UniqueName: \"kubernetes.io/projected/4ddf28f1-4702-477d-b1d8-4f6758b4dc9a-kube-api-access-4jjx2\") pod \"watcher-operator-controller-manager-769dc69bc-8jrq7\" (UID: \"4ddf28f1-4702-477d-b1d8-4f6758b4dc9a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.891137 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.898677 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.907842 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jjx2\" (UniqueName: \"kubernetes.io/projected/4ddf28f1-4702-477d-b1d8-4f6758b4dc9a-kube-api-access-4jjx2\") pod \"watcher-operator-controller-manager-769dc69bc-8jrq7\" (UID: \"4ddf28f1-4702-477d-b1d8-4f6758b4dc9a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.937768 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.949936 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.955159 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt"] Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.972959 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.979882 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c88nj\" (UniqueName: \"kubernetes.io/projected/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-kube-api-access-c88nj\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.979945 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gp2s\" (UniqueName: \"kubernetes.io/projected/84f51195-36c1-4039-af68-b0643b7c27e5-kube-api-access-7gp2s\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zgpvf\" (UID: \"84f51195-36c1-4039-af68-b0643b7c27e5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.994681 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.994774 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.994883 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.994933 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:29.494915277 +0000 UTC m=+1092.908737165 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "webhook-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: I1203 16:49:28.993770 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.995540 5002 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:49:28 crc kubenswrapper[5002]: E1203 16:49:28.995567 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:29.495558745 +0000 UTC m=+1092.909380633 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "metrics-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.035652 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c88nj\" (UniqueName: \"kubernetes.io/projected/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-kube-api-access-c88nj\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.098667 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gp2s\" (UniqueName: \"kubernetes.io/projected/84f51195-36c1-4039-af68-b0643b7c27e5-kube-api-access-7gp2s\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zgpvf\" (UID: \"84f51195-36c1-4039-af68-b0643b7c27e5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.098774 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.098945 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.099017 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert podName:127b2eae-6b24-45a7-871b-e9569b062e28 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:30.098996208 +0000 UTC m=+1093.512818096 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686z66jk" (UID: "127b2eae-6b24-45a7-871b-e9569b062e28") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.133966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gp2s\" (UniqueName: \"kubernetes.io/projected/84f51195-36c1-4039-af68-b0643b7c27e5-kube-api-access-7gp2s\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zgpvf\" (UID: \"84f51195-36c1-4039-af68-b0643b7c27e5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.200974 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.366031 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" event={"ID":"fbb180bd-c957-4362-a7ac-04065940a34e","Type":"ContainerStarted","Data":"9748d5d82b305055d2fc45523b7690d15ad85f72c5cc7128dc6967311130dbad"} Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.491374 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.515102 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.515197 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.515360 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.515420 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:30.5154016 +0000 UTC m=+1093.929223488 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "webhook-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.515931 5002 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.515969 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:30.515958136 +0000 UTC m=+1093.929780024 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "metrics-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.516596 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.617011 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.617246 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: E1203 16:49:29.617312 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert podName:c23347dc-e104-4ca5-a132-60a102150117 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:31.617292041 +0000 UTC m=+1095.031113929 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert") pod "infra-operator-controller-manager-57548d458d-95x4f" (UID: "c23347dc-e104-4ca5-a132-60a102150117") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.686220 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.729839 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.734215 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6"] Dec 03 16:49:29 crc kubenswrapper[5002]: W1203 16:49:29.742778 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda54f31fa_c7c6_48b0_90b9_df614b116b8c.slice/crio-ab47e3400e544c31d923ecb39ae0e24c29c0fade46ee0f4afbebe34d79b5ea8a WatchSource:0}: Error finding container ab47e3400e544c31d923ecb39ae0e24c29c0fade46ee0f4afbebe34d79b5ea8a: Status 404 returned error can't find the container with id ab47e3400e544c31d923ecb39ae0e24c29c0fade46ee0f4afbebe34d79b5ea8a Dec 03 16:49:29 crc kubenswrapper[5002]: W1203 16:49:29.744013 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod240013fb_5ef1_4923_8c16_3967ff1a47e8.slice/crio-adc6ae965666a053a2560ee97749f3c5f371f91aec2f640a24ea231d86aaec14 WatchSource:0}: Error finding container adc6ae965666a053a2560ee97749f3c5f371f91aec2f640a24ea231d86aaec14: Status 404 returned error can't find the container with id adc6ae965666a053a2560ee97749f3c5f371f91aec2f640a24ea231d86aaec14 Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.870278 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.885437 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.909842 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5"] Dec 03 16:49:29 crc kubenswrapper[5002]: I1203 16:49:29.933416 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.116440 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.124429 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7"] Dec 03 16:49:30 crc kubenswrapper[5002]: W1203 16:49:30.126289 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43a120e7_09d2_4d8e_8acd_06b6ab38cc40.slice/crio-e54dbddf60b28e65d58f4b5105f436ac6e07b3ad0846c49f6bd090670e310d66 WatchSource:0}: Error finding container 
e54dbddf60b28e65d58f4b5105f436ac6e07b3ad0846c49f6bd090670e310d66: Status 404 returned error can't find the container with id e54dbddf60b28e65d58f4b5105f436ac6e07b3ad0846c49f6bd090670e310d66 Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.136618 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8mhjn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-ngjzc_openstack-operators(43a120e7-09d2-4d8e-8acd-06b6ab38cc40): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.142820 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.143112 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.143184 5002 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert podName:127b2eae-6b24-45a7-871b-e9569b062e28 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:32.143161291 +0000 UTC m=+1095.556983179 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686z66jk" (UID: "127b2eae-6b24-45a7-871b-e9569b062e28") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.147358 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8mhjn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-ngjzc_openstack-operators(43a120e7-09d2-4d8e-8acd-06b6ab38cc40): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.149401 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" podUID="43a120e7-09d2-4d8e-8acd-06b6ab38cc40" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.153455 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.160945 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.186841 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.197797 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 
16:49:30.205312 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.216153 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7"] Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.222062 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk"] Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.229888 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7gp2s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-zgpvf_openstack-operators(84f51195-36c1-4039-af68-b0643b7c27e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.229920 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv"] Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.231798 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" podUID="84f51195-36c1-4039-af68-b0643b7c27e5" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.236028 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4jjx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-8jrq7_openstack-operators(4ddf28f1-4702-477d-b1d8-4f6758b4dc9a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.238002 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4qsxc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-bwfpx_openstack-operators(cd874f14-4eb0-46ae-8968-4a52e4a3bc69): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.238712 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4jjx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-8jrq7_openstack-operators(4ddf28f1-4702-477d-b1d8-4f6758b4dc9a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.240092 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" podUID="4ddf28f1-4702-477d-b1d8-4f6758b4dc9a" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.240642 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4qsxc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-bwfpx_openstack-operators(cd874f14-4eb0-46ae-8968-4a52e4a3bc69): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.242267 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" podUID="cd874f14-4eb0-46ae-8968-4a52e4a3bc69" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.246724 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p8tbr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-w2vkv_openstack-operators(6fc5b30e-4458-4d0e-b476-107c4f92f56a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: W1203 16:49:30.248628 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod395c93a8_4649_4ddf_b630_d9982c670991.slice/crio-1cc0c92a6b2c38ec21b26a35590e5b0de88b4e1b22c06e04223e696dfa7463bb WatchSource:0}: Error finding container 1cc0c92a6b2c38ec21b26a35590e5b0de88b4e1b22c06e04223e696dfa7463bb: Status 404 returned error can't find the container with id 1cc0c92a6b2c38ec21b26a35590e5b0de88b4e1b22c06e04223e696dfa7463bb Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.248777 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p8tbr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-w2vkv_openstack-operators(6fc5b30e-4458-4d0e-b476-107c4f92f56a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.250047 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" podUID="6fc5b30e-4458-4d0e-b476-107c4f92f56a" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.252149 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-shlcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-4rqsk_openstack-operators(395c93a8-4649-4ddf-b630-d9982c670991): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.254855 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-shlcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-4rqsk_openstack-operators(395c93a8-4649-4ddf-b630-d9982c670991): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.256000 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" podUID="395c93a8-4649-4ddf-b630-d9982c670991" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.377821 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" event={"ID":"6fc5b30e-4458-4d0e-b476-107c4f92f56a","Type":"ContainerStarted","Data":"4e56b66553485e89b8ad8f4971a3762babee805ca5653e3477721c667df679db"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.380339 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" event={"ID":"af132426-0104-4abf-bad5-67615b919af7","Type":"ContainerStarted","Data":"00195a2e41ec4d912256b1cf5bcc2bf877fd70b991de52544c7617c8f6bd4c72"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.382443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" event={"ID":"e4af56b3-88df-4902-b51a-1ed81ced8583","Type":"ContainerStarted","Data":"ecb476101844656022ba773b58245910db8f7f775d43a2a1d784549e114e56a2"} Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.384339 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" podUID="6fc5b30e-4458-4d0e-b476-107c4f92f56a" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.384779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" event={"ID":"240013fb-5ef1-4923-8c16-3967ff1a47e8","Type":"ContainerStarted","Data":"adc6ae965666a053a2560ee97749f3c5f371f91aec2f640a24ea231d86aaec14"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.387124 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" event={"ID":"4b55dbea-68c7-4290-a698-068c741b22a6","Type":"ContainerStarted","Data":"51c8a53534ba092fdd59b101c2f6ba00c4ae9c54f698c116a4c52647deea99e5"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.395622 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" event={"ID":"f49045c6-275a-46b4-8f61-9fd85401869f","Type":"ContainerStarted","Data":"41c4fbed399f7f81322c05346a60e93797d29af0183a0f0ec6fad023b4fc845a"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.407530 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" event={"ID":"5ef47d10-5c5b-4cd6-b194-753a9fcc1b54","Type":"ContainerStarted","Data":"4c24cb955b0f9510f6d9d40739b4cf77eb5fe469ffd769ea9029638e0fd145ce"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.409794 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" event={"ID":"a25d04de-e230-4750-a4c9-6a43bf344b9e","Type":"ContainerStarted","Data":"71a84f7b90fb02d3cbf436d85eba57ba971b63cbd0d93de099c22f49a0d67620"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.412230 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" event={"ID":"ec47f682-ce44-4e37-980b-a5e1c1142284","Type":"ContainerStarted","Data":"2e71279093c0381b5982a2c3ab87c98ca5a460e4479845dd3965befdb6030e81"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.415346 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" 
event={"ID":"a54f31fa-c7c6-48b0-90b9-df614b116b8c","Type":"ContainerStarted","Data":"ab47e3400e544c31d923ecb39ae0e24c29c0fade46ee0f4afbebe34d79b5ea8a"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.417607 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" event={"ID":"97caeb4d-2742-41fb-ac4f-440555163d81","Type":"ContainerStarted","Data":"1aa4da7945bcfdb1cb1c442f8483e2bbbb7676b6e6e13ca21879b274ec953f24"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.420241 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" event={"ID":"43a120e7-09d2-4d8e-8acd-06b6ab38cc40","Type":"ContainerStarted","Data":"e54dbddf60b28e65d58f4b5105f436ac6e07b3ad0846c49f6bd090670e310d66"} Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.422988 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" podUID="43a120e7-09d2-4d8e-8acd-06b6ab38cc40" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.423702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" event={"ID":"395c93a8-4649-4ddf-b630-d9982c670991","Type":"ContainerStarted","Data":"1cc0c92a6b2c38ec21b26a35590e5b0de88b4e1b22c06e04223e696dfa7463bb"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.430417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" event={"ID":"7bb9718d-9129-421f-8f7e-8b5c8d7d9e53","Type":"ContainerStarted","Data":"f4259746b6c75af82b6a96e484e62e5e51ed37d57e84de8ce4f1748f8fcba0d0"} Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.431569 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" podUID="395c93a8-4649-4ddf-b630-d9982c670991" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.433689 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" event={"ID":"b098b672-2320-4ba6-bd50-2237cb576d80","Type":"ContainerStarted","Data":"c764ccc595ee0deff1976f3b634c3a9793fb27220d2080255f9cccd9703aa8ff"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.438720 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" event={"ID":"4ddf28f1-4702-477d-b1d8-4f6758b4dc9a","Type":"ContainerStarted","Data":"2ef0cddb74dadc9b2b008ae67ea8040f6a61a6527cfe017b005da65b66c35416"} Dec 03 16:49:30 crc 
kubenswrapper[5002]: I1203 16:49:30.443226 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" event={"ID":"0e786251-b94d-47b4-930f-5f2cac19cc52","Type":"ContainerStarted","Data":"56a7444fdcdf238b9f911bd5face1995ed87aa5dff2c24348951a70c10e2801a"} Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.444246 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" podUID="4ddf28f1-4702-477d-b1d8-4f6758b4dc9a" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.446027 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" event={"ID":"84f51195-36c1-4039-af68-b0643b7c27e5","Type":"ContainerStarted","Data":"59a8b183d14b5f2149d076269153f8f6883d8541eb0d91a0533fedcb5e5f2365"} Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.449177 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" event={"ID":"cd874f14-4eb0-46ae-8968-4a52e4a3bc69","Type":"ContainerStarted","Data":"98aca35cbbcbdbc4af596cf03c60953e26db5415e41b2c0685ca677c3c913049"} Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.449303 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" podUID="84f51195-36c1-4039-af68-b0643b7c27e5" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.460692 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" podUID="cd874f14-4eb0-46ae-8968-4a52e4a3bc69" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.549169 5002 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.549246 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:32.549229741 +0000 UTC m=+1095.963051629 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "metrics-server-cert" not found Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.549413 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:30 crc kubenswrapper[5002]: I1203 16:49:30.549557 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.550112 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:49:30 crc kubenswrapper[5002]: E1203 16:49:30.550274 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:32.550226048 +0000 UTC m=+1095.964047926 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "webhook-server-cert" not found Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.476683 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" podUID="84f51195-36c1-4039-af68-b0643b7c27e5" Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.477877 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" podUID="4ddf28f1-4702-477d-b1d8-4f6758b4dc9a" Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.477951 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for 
\"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" podUID="6fc5b30e-4458-4d0e-b476-107c4f92f56a" Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.478154 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" podUID="395c93a8-4649-4ddf-b630-d9982c670991" Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.479000 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" podUID="43a120e7-09d2-4d8e-8acd-06b6ab38cc40" Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.481186 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" podUID="cd874f14-4eb0-46ae-8968-4a52e4a3bc69" Dec 03 16:49:31 crc kubenswrapper[5002]: I1203 16:49:31.669967 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.671422 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:31 crc kubenswrapper[5002]: E1203 16:49:31.682321 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert podName:c23347dc-e104-4ca5-a132-60a102150117 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:35.682279769 +0000 UTC m=+1099.096101717 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert") pod "infra-operator-controller-manager-57548d458d-95x4f" (UID: "c23347dc-e104-4ca5-a132-60a102150117") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:49:32 crc kubenswrapper[5002]: I1203 16:49:32.190698 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:49:32 crc kubenswrapper[5002]: E1203 16:49:32.190982 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:32 crc kubenswrapper[5002]: E1203 16:49:32.191045 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert podName:127b2eae-6b24-45a7-871b-e9569b062e28 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:36.19102249 +0000 UTC m=+1099.604844378 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686z66jk" (UID: "127b2eae-6b24-45a7-871b-e9569b062e28") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:49:32 crc kubenswrapper[5002]: I1203 16:49:32.596612 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:32 crc kubenswrapper[5002]: I1203 16:49:32.596712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:49:32 crc kubenswrapper[5002]: E1203 16:49:32.596857 5002 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:49:32 crc kubenswrapper[5002]: E1203 16:49:32.596898 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:49:32 crc kubenswrapper[5002]: E1203 16:49:32.596954 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:36.596930737 +0000 UTC m=+1100.010752625 (durationBeforeRetry 4s). 
Dec 03 16:49:32 crc kubenswrapper[5002]: E1203 16:49:32.596978 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:36.596970188 +0000 UTC m=+1100.010792076 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "webhook-server-cert" not found
Dec 03 16:49:35 crc kubenswrapper[5002]: I1203 16:49:35.750811 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"
Dec 03 16:49:35 crc kubenswrapper[5002]: E1203 16:49:35.751006 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 03 16:49:35 crc kubenswrapper[5002]: E1203 16:49:35.751105 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert podName:c23347dc-e104-4ca5-a132-60a102150117 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:43.751085534 +0000 UTC m=+1107.164907422 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert") pod "infra-operator-controller-manager-57548d458d-95x4f" (UID: "c23347dc-e104-4ca5-a132-60a102150117") : secret "infra-operator-webhook-server-cert" not found
Dec 03 16:49:36 crc kubenswrapper[5002]: I1203 16:49:36.278983 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk"
Dec 03 16:49:36 crc kubenswrapper[5002]: E1203 16:49:36.279341 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 03 16:49:36 crc kubenswrapper[5002]: E1203 16:49:36.279449 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert podName:127b2eae-6b24-45a7-871b-e9569b062e28 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:44.27941102 +0000 UTC m=+1107.693232918 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686z66jk" (UID: "127b2eae-6b24-45a7-871b-e9569b062e28") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 03 16:49:36 crc kubenswrapper[5002]: I1203 16:49:36.685331 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"
Dec 03 16:49:36 crc kubenswrapper[5002]: I1203 16:49:36.685472 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"
Dec 03 16:49:36 crc kubenswrapper[5002]: E1203 16:49:36.685603 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 03 16:49:36 crc kubenswrapper[5002]: E1203 16:49:36.685683 5002 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 03 16:49:36 crc kubenswrapper[5002]: E1203 16:49:36.685733 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:44.685706087 +0000 UTC m=+1108.099527975 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "webhook-server-cert" not found
Dec 03 16:49:36 crc kubenswrapper[5002]: E1203 16:49:36.685835 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:49:44.6857996 +0000 UTC m=+1108.099621478 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "metrics-server-cert" not found
Dec 03 16:49:43 crc kubenswrapper[5002]: I1203 16:49:43.831564 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"
Dec 03 16:49:43 crc kubenswrapper[5002]: E1203 16:49:43.831889 5002 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 03 16:49:43 crc kubenswrapper[5002]: E1203 16:49:43.832607 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert podName:c23347dc-e104-4ca5-a132-60a102150117 nodeName:}" failed. No retries permitted until 2025-12-03 16:49:59.832574489 +0000 UTC m=+1123.246396387 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert") pod "infra-operator-controller-manager-57548d458d-95x4f" (UID: "c23347dc-e104-4ca5-a132-60a102150117") : secret "infra-operator-webhook-server-cert" not found
Dec 03 16:49:44 crc kubenswrapper[5002]: I1203 16:49:44.341081 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk"
Dec 03 16:49:44 crc kubenswrapper[5002]: E1203 16:49:44.341548 5002 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 03 16:49:44 crc kubenswrapper[5002]: E1203 16:49:44.342219 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert podName:127b2eae-6b24-45a7-871b-e9569b062e28 nodeName:}" failed. No retries permitted until 2025-12-03 16:50:00.342170776 +0000 UTC m=+1123.755992664 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert") pod "openstack-baremetal-operator-controller-manager-55d86b6686z66jk" (UID: "127b2eae-6b24-45a7-871b-e9569b062e28") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 03 16:49:44 crc kubenswrapper[5002]: I1203 16:49:44.749766 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"
Dec 03 16:49:44 crc kubenswrapper[5002]: I1203 16:49:44.749899 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"
Dec 03 16:49:44 crc kubenswrapper[5002]: E1203 16:49:44.749916 5002 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 03 16:49:44 crc kubenswrapper[5002]: E1203 16:49:44.750011 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:50:00.749987676 +0000 UTC m=+1124.163809764 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "metrics-server-cert" not found
Dec 03 16:49:44 crc kubenswrapper[5002]: E1203 16:49:44.750127 5002 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 03 16:49:44 crc kubenswrapper[5002]: E1203 16:49:44.750239 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs podName:c295c3ec-fe2b-4ae0-a818-6847d923dc1d nodeName:}" failed. No retries permitted until 2025-12-03 16:50:00.750188071 +0000 UTC m=+1124.164010129 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs") pod "openstack-operator-controller-manager-9f56fc979-b5vdg" (UID: "c295c3ec-fe2b-4ae0-a818-6847d923dc1d") : secret "webhook-server-cert" not found
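Note the retry spacing the nestedpendingoperations entries record: durationBeforeRetry goes 4s, then 8s, then 16s for the same volumes, i.e. the delay doubles on each failed attempt. A tiny sketch of that doubling; the 2-minute cap is an illustrative assumption, the log itself only shows the 4s/8s/16s steps:

// Minimal sketch of the doubling retry interval seen above (4s -> 8s -> 16s).
package main

import (
	"fmt"
	"time"
)

func main() {
	const maxDelay = 2 * time.Minute // illustrative cap, not taken from the log
	delay := 4 * time.Second         // first durationBeforeRetry in the log
	for attempt := 1; attempt <= 5; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, delay)
		delay *= 2 // double the wait after every failure
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}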
Dec 03 16:49:45 crc kubenswrapper[5002]: E1203 16:49:45.355459 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f"
Dec 03 16:49:45 crc kubenswrapper[5002]: E1203 16:49:45.355663 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xzk2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-lfdk7_openstack-operators(b098b672-2320-4ba6-bd50-2237cb576d80): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 16:49:46 crc kubenswrapper[5002]: E1203 16:49:46.149638 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7"
Dec 03 16:49:46 crc kubenswrapper[5002]: E1203 16:49:46.150077 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkvvj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-bfqzl_openstack-operators(4b55dbea-68c7-4290-a698-068c741b22a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 16:49:46 crc kubenswrapper[5002]: I1203 16:49:46.580379 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" event={"ID":"a54f31fa-c7c6-48b0-90b9-df614b116b8c","Type":"ContainerStarted","Data":"c8a018551fc12071bc6673aee2fc2bdd32b99d30657a672531fec1bab87219ff"}
Dec 03 16:49:46 crc kubenswrapper[5002]: I1203 16:49:46.582596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" event={"ID":"fbb180bd-c957-4362-a7ac-04065940a34e","Type":"ContainerStarted","Data":"a0da1f1c7e4ea499f27919c9b3631a7cff72a0bd54d3a44fcc1afddcdb6937b8"}
Dec 03 16:49:47 crc kubenswrapper[5002]: I1203 16:49:47.601046 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" event={"ID":"f49045c6-275a-46b4-8f61-9fd85401869f","Type":"ContainerStarted","Data":"233d37bf35344adc5d9b14227b36c20f6752001a9978d38ef279db40a50b8174"}
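The two "Unhandled Error" dumps above print the full v1.Container the kubelet tried to start. Rebuilt as Go source with the real k8s.io/api/core/v1 types, the probe settings they show look like the sketch below; only fields visible in the dump are filled in, everything else is left at its zero value:

// Sketch of the manager container's probes as dumped in the "Unhandled Error"
// entries above, expressed with the standard Kubernetes API types.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	c := corev1.Container{
		Name:    "manager",
		Command: []string{"/manager"},
		Args:    []string{"--leader-elect", "--health-probe-bind-address=:8081", "--metrics-bind-address=127.0.0.1:8080"},
		LivenessProbe: &corev1.Probe{
			ProbeHandler: corev1.ProbeHandler{
				HTTPGet: &corev1.HTTPGetAction{Path: "/healthz", Port: intstr.FromInt(8081), Scheme: corev1.URISchemeHTTP},
			},
			// Values copied from the dump: delay 15s, period 20s, 3 failures allowed.
			InitialDelaySeconds: 15, TimeoutSeconds: 1, PeriodSeconds: 20, SuccessThreshold: 1, FailureThreshold: 3,
		},
		ReadinessProbe: &corev1.Probe{
			ProbeHandler: corev1.ProbeHandler{
				HTTPGet: &corev1.HTTPGetAction{Path: "/readyz", Port: intstr.FromInt(8081), Scheme: corev1.URISchemeHTTP},
			},
			// Values copied from the dump: delay 5s, period 10s, 3 failures allowed.
			InitialDelaySeconds: 5, TimeoutSeconds: 1, PeriodSeconds: 10, SuccessThreshold: 1, FailureThreshold: 3,
		},
	}
	fmt.Printf("%+v\n", c)
}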
event={"ID":"f49045c6-275a-46b4-8f61-9fd85401869f","Type":"ContainerStarted","Data":"233d37bf35344adc5d9b14227b36c20f6752001a9978d38ef279db40a50b8174"} Dec 03 16:49:52 crc kubenswrapper[5002]: I1203 16:49:52.640864 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" event={"ID":"e4af56b3-88df-4902-b51a-1ed81ced8583","Type":"ContainerStarted","Data":"7d1074858c15158d4db54fcac3e4b417fc60bdc27aae97e382de6faa9170460f"} Dec 03 16:49:53 crc kubenswrapper[5002]: I1203 16:49:53.653581 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" event={"ID":"240013fb-5ef1-4923-8c16-3967ff1a47e8","Type":"ContainerStarted","Data":"e2c9e43550407965d1c20ed3171b759400684ad1a6ab964e65ff307cc2e1fd73"} Dec 03 16:49:54 crc kubenswrapper[5002]: I1203 16:49:54.668910 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" event={"ID":"ec47f682-ce44-4e37-980b-a5e1c1142284","Type":"ContainerStarted","Data":"b2eb8ccbc876c0fc9ce9541522279b627ae4f99e29f9f54e2fa93367cf0b48f5"} Dec 03 16:49:54 crc kubenswrapper[5002]: I1203 16:49:54.671649 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" event={"ID":"7bb9718d-9129-421f-8f7e-8b5c8d7d9e53","Type":"ContainerStarted","Data":"8829625c308f2dd295b89e06ba35cf7c623f6ed05c381ef7a27b16b59e65bb9f"} Dec 03 16:49:55 crc kubenswrapper[5002]: I1203 16:49:55.510281 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:49:55 crc kubenswrapper[5002]: I1203 16:49:55.697576 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" event={"ID":"0e786251-b94d-47b4-930f-5f2cac19cc52","Type":"ContainerStarted","Data":"4d7c7ea43c6254b66241b262baf940a14d12228166da5ae715cacde18f76fedf"} Dec 03 16:49:55 crc kubenswrapper[5002]: I1203 16:49:55.700468 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" event={"ID":"97caeb4d-2742-41fb-ac4f-440555163d81","Type":"ContainerStarted","Data":"528f0a626a788af1db07622339e2e3723fbc54f8c4a78c21128d029ddab61b41"} Dec 03 16:49:55 crc kubenswrapper[5002]: I1203 16:49:55.703953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" event={"ID":"a25d04de-e230-4750-a4c9-6a43bf344b9e","Type":"ContainerStarted","Data":"ecfb9c63eb8f9a748f349a159544320360d77f9a92f07d421d950966050a2136"} Dec 03 16:49:56 crc kubenswrapper[5002]: I1203 16:49:56.717986 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" event={"ID":"af132426-0104-4abf-bad5-67615b919af7","Type":"ContainerStarted","Data":"a8d9b8f92eddefd0277084938bb1a4b384c44a64810290ecf7829f91a19c56d8"} Dec 03 16:49:59 crc kubenswrapper[5002]: I1203 16:49:59.858856 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:49:59 crc 
kubenswrapper[5002]: I1203 16:49:59.871435 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c23347dc-e104-4ca5-a132-60a102150117-cert\") pod \"infra-operator-controller-manager-57548d458d-95x4f\" (UID: \"c23347dc-e104-4ca5-a132-60a102150117\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.044187 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.368473 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.374234 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127b2eae-6b24-45a7-871b-e9569b062e28-cert\") pod \"openstack-baremetal-operator-controller-manager-55d86b6686z66jk\" (UID: \"127b2eae-6b24-45a7-871b-e9569b062e28\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.494167 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.781409 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.793842 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.801472 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" event={"ID":"4ddf28f1-4702-477d-b1d8-4f6758b4dc9a","Type":"ContainerStarted","Data":"cec2553318744f3f30452a2b01bc137e5a2acc292e9624a16b9650bc25e819ba"} Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.803321 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" event={"ID":"5ef47d10-5c5b-4cd6-b194-753a9fcc1b54","Type":"ContainerStarted","Data":"fff1482ab60ec03d7fb9322f7819f197088c070e0fd9cf5eeac32f96d4dbddc0"} Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.807073 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-webhook-certs\") pod 
\"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.807429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c295c3ec-fe2b-4ae0-a818-6847d923dc1d-metrics-certs\") pod \"openstack-operator-controller-manager-9f56fc979-b5vdg\" (UID: \"c295c3ec-fe2b-4ae0-a818-6847d923dc1d\") " pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:50:00 crc kubenswrapper[5002]: I1203 16:50:00.973204 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.135979 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk"] Dec 03 16:50:01 crc kubenswrapper[5002]: W1203 16:50:01.157726 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod127b2eae_6b24_45a7_871b_e9569b062e28.slice/crio-cf55907832995a822b42b52a2162f888a9ac1db454f0a8ae07c280b195dd448e WatchSource:0}: Error finding container cf55907832995a822b42b52a2162f888a9ac1db454f0a8ae07c280b195dd448e: Status 404 returned error can't find the container with id cf55907832995a822b42b52a2162f888a9ac1db454f0a8ae07c280b195dd448e Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.236951 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"] Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.471826 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"] Dec 03 16:50:01 crc kubenswrapper[5002]: E1203 16:50:01.515593 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" podUID="b098b672-2320-4ba6-bd50-2237cb576d80" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.822921 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" event={"ID":"b098b672-2320-4ba6-bd50-2237cb576d80","Type":"ContainerStarted","Data":"4f08ddb6f50a01a8024c952fdfcedde3cbf7baa676159c3e367282c8d3032268"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.833464 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" event={"ID":"5ef47d10-5c5b-4cd6-b194-753a9fcc1b54","Type":"ContainerStarted","Data":"85b1bd88b8d39d0a931ac8cb1a1ee1f0a4c61269d46ac54008069b1e7bb32778"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.835730 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.838458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" 
event={"ID":"fbb180bd-c957-4362-a7ac-04065940a34e","Type":"ContainerStarted","Data":"ac8145863942fdcae6a2fbeac7525f4bd126ac3026428ddf1cac98b9f7a2cee2"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.839467 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.845937 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.855362 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" event={"ID":"43a120e7-09d2-4d8e-8acd-06b6ab38cc40","Type":"ContainerStarted","Data":"e971266d62a7e86f6d57c5008018c06498c796ca36d0fc874bfe1085eac3cdf8"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.855423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" event={"ID":"43a120e7-09d2-4d8e-8acd-06b6ab38cc40","Type":"ContainerStarted","Data":"e7b994acfb6ae4820fd26d317bc33f1f68ed3eaee7c48015082ac55ca169896d"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.856270 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.865466 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" event={"ID":"84f51195-36c1-4039-af68-b0643b7c27e5","Type":"ContainerStarted","Data":"67eda520f991bc4bee24e3eac3d7d106be78b3246895febf5d0c1106bc1a03be"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.879957 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz" podStartSLOduration=4.044893396 podStartE2EDuration="34.879932768s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.096167638 +0000 UTC m=+1093.509989516" lastFinishedPulling="2025-12-03 16:50:00.931207 +0000 UTC m=+1124.345028888" observedRunningTime="2025-12-03 16:50:01.879421635 +0000 UTC m=+1125.293243523" watchObservedRunningTime="2025-12-03 16:50:01.879932768 +0000 UTC m=+1125.293754656" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.886001 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" event={"ID":"cd874f14-4eb0-46ae-8968-4a52e4a3bc69","Type":"ContainerStarted","Data":"7bf03f70c527a16b20a54d06788bb75d939ab82f42b215f0c58d6f971b32df6c"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.886058 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" event={"ID":"cd874f14-4eb0-46ae-8968-4a52e4a3bc69","Type":"ContainerStarted","Data":"388221fb4bc70529eb638be01921882da6ed4575859e5ed80f31a171b32ee540"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.886995 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.894356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" event={"ID":"395c93a8-4649-4ddf-b630-d9982c670991","Type":"ContainerStarted","Data":"b8074606a5efce2950768cbf21fd9137609618b12d3f06aeb4fe6b76ed899b2e"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.914491 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" event={"ID":"6fc5b30e-4458-4d0e-b476-107c4f92f56a","Type":"ContainerStarted","Data":"c43329e3ee7bcab7ef2e25c348d385147c3b8363d3c2ca6bce01aa094514f9ba"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.914567 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" event={"ID":"6fc5b30e-4458-4d0e-b476-107c4f92f56a","Type":"ContainerStarted","Data":"54c53c1df493124ffc6625090b8b8401dbc341797c1610963e91e28b9db29639"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.915416 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.923682 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" event={"ID":"127b2eae-6b24-45a7-871b-e9569b062e28","Type":"ContainerStarted","Data":"cf55907832995a822b42b52a2162f888a9ac1db454f0a8ae07c280b195dd448e"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.934414 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-bp8bt" podStartSLOduration=3.071400927 podStartE2EDuration="34.934379629s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.063374335 +0000 UTC m=+1092.477196223" lastFinishedPulling="2025-12-03 16:50:00.926353037 +0000 UTC m=+1124.340174925" observedRunningTime="2025-12-03 16:50:01.915152496 +0000 UTC m=+1125.328974384" watchObservedRunningTime="2025-12-03 16:50:01.934379629 +0000 UTC m=+1125.348201517" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.944756 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" event={"ID":"240013fb-5ef1-4923-8c16-3967ff1a47e8","Type":"ContainerStarted","Data":"aad42e766658b64c22a4ef66b1d7506865bd7cbf314dd3bf6f8f7756bfa15408"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.946064 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.948606 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tjcc7" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.962240 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc" podStartSLOduration=8.505588963 podStartE2EDuration="34.962206216s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.136374045 +0000 UTC m=+1093.550195933" lastFinishedPulling="2025-12-03 16:49:56.592991298 +0000 UTC m=+1120.006813186" observedRunningTime="2025-12-03 16:50:01.94546527 +0000 UTC m=+1125.359287168" watchObservedRunningTime="2025-12-03 16:50:01.962206216 
+0000 UTC m=+1125.376028104" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.979181 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" event={"ID":"af132426-0104-4abf-bad5-67615b919af7","Type":"ContainerStarted","Data":"4456b852a1726d0d56f089186ab18ac975bea72d89741d33a63f80b2c577dcb0"} Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.980579 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.988452 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv" podStartSLOduration=4.418641418 podStartE2EDuration="33.988427439s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.246544021 +0000 UTC m=+1093.660365899" lastFinishedPulling="2025-12-03 16:49:59.816330022 +0000 UTC m=+1123.230151920" observedRunningTime="2025-12-03 16:50:01.972891047 +0000 UTC m=+1125.386712925" watchObservedRunningTime="2025-12-03 16:50:01.988427439 +0000 UTC m=+1125.402249327" Dec 03 16:50:01 crc kubenswrapper[5002]: I1203 16:50:01.988939 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" Dec 03 16:50:02 crc kubenswrapper[5002]: I1203 16:50:02.004512 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" event={"ID":"c295c3ec-fe2b-4ae0-a818-6847d923dc1d","Type":"ContainerStarted","Data":"ad317f293452956b8fa3c81120de942c40552d3a9fbd88c52076a96df54b9ae6"} Dec 03 16:50:02 crc kubenswrapper[5002]: I1203 16:50:02.007318 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" event={"ID":"c23347dc-e104-4ca5-a132-60a102150117","Type":"ContainerStarted","Data":"afc357574f22e15ae9b2eb9a426fb2c199a843986d09194676e930671eeaebb9"} Dec 03 16:50:02 crc kubenswrapper[5002]: I1203 16:50:02.035857 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx" podStartSLOduration=7.114922169 podStartE2EDuration="34.035830318s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.237830363 +0000 UTC m=+1093.651652251" lastFinishedPulling="2025-12-03 16:49:57.158738512 +0000 UTC m=+1120.572560400" observedRunningTime="2025-12-03 16:50:01.995986764 +0000 UTC m=+1125.409808652" watchObservedRunningTime="2025-12-03 16:50:02.035830318 +0000 UTC m=+1125.449652206" Dec 03 16:50:02 crc kubenswrapper[5002]: I1203 16:50:02.036013 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zgpvf" podStartSLOduration=3.687595098 podStartE2EDuration="34.036006373s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.229606549 +0000 UTC m=+1093.643428437" lastFinishedPulling="2025-12-03 16:50:00.578017824 +0000 UTC m=+1123.991839712" observedRunningTime="2025-12-03 16:50:02.031764478 +0000 UTC m=+1125.445586386" watchObservedRunningTime="2025-12-03 16:50:02.036006373 +0000 UTC m=+1125.449828281" Dec 03 16:50:02 crc kubenswrapper[5002]: I1203 16:50:02.128948 5002 
Dec 03 16:50:02 crc kubenswrapper[5002]: I1203 16:50:02.170529 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-b2xhv" podStartSLOduration=3.69641769 podStartE2EDuration="35.17050683s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.557832768 +0000 UTC m=+1092.971654656" lastFinishedPulling="2025-12-03 16:50:01.031921908 +0000 UTC m=+1124.445743796" observedRunningTime="2025-12-03 16:50:02.128823827 +0000 UTC m=+1125.542645725" watchObservedRunningTime="2025-12-03 16:50:02.17050683 +0000 UTC m=+1125.584328718"
Dec 03 16:50:02 crc kubenswrapper[5002]: E1203 16:50:02.605350 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" podUID="4b55dbea-68c7-4290-a698-068c741b22a6"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.021121 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" event={"ID":"7bb9718d-9129-421f-8f7e-8b5c8d7d9e53","Type":"ContainerStarted","Data":"ee1565c903a14b029e11235ad699708cab095141ee42e762a493ea582be47783"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.021365 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.024192 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" event={"ID":"b098b672-2320-4ba6-bd50-2237cb576d80","Type":"ContainerStarted","Data":"705ec68462cdde27c4778b53b0a6412e8ebd323c51a09f68060dd86b6fcdd3d6"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.024306 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.027157 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.027400 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" event={"ID":"4ddf28f1-4702-477d-b1d8-4f6758b4dc9a","Type":"ContainerStarted","Data":"50010da5c9f5da9bd9b0acabb04ed7e66520d46ee3338937b22d88a9ab2a5e56"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.027549 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.029487 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" event={"ID":"c295c3ec-fe2b-4ae0-a818-6847d923dc1d","Type":"ContainerStarted","Data":"afcd24c662047f3b7c6ae192409c3925b35b08ab5a8b8010aa2d95bfc4966a44"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.030329 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.035497 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" event={"ID":"f49045c6-275a-46b4-8f61-9fd85401869f","Type":"ContainerStarted","Data":"ed2fb52f4754411bc1cce9e0752fad85f5f547a5f512e9599128b611934e514f"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.036320 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.038522 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.040071 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-w5vlv" podStartSLOduration=4.233557998 podStartE2EDuration="36.040059506s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.904372254 +0000 UTC m=+1093.318194142" lastFinishedPulling="2025-12-03 16:50:01.710873762 +0000 UTC m=+1125.124695650" observedRunningTime="2025-12-03 16:50:03.037901187 +0000 UTC m=+1126.451723075" watchObservedRunningTime="2025-12-03 16:50:03.040059506 +0000 UTC m=+1126.453881394"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.052685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" event={"ID":"a25d04de-e230-4750-a4c9-6a43bf344b9e","Type":"ContainerStarted","Data":"a41b802b48173d187e02fccd9d78f844483c6e2ec5e8c09512efb6566dcd514b"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.054764 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.058380 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.075473 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" event={"ID":"e4af56b3-88df-4902-b51a-1ed81ced8583","Type":"ContainerStarted","Data":"8cb1aa6b81505d7f84224f42e3f8b16c72e98d8d4ece0face4efc911e8ff20be"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.077787 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.079561 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.080059 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rlh8w" podStartSLOduration=3.20284002 podStartE2EDuration="36.080030833s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.589997835 +0000 UTC m=+1093.003819723" lastFinishedPulling="2025-12-03 16:50:02.467188648 +0000 UTC m=+1125.881010536" observedRunningTime="2025-12-03 16:50:03.079871069 +0000 UTC m=+1126.493692957" watchObservedRunningTime="2025-12-03 16:50:03.080030833 +0000 UTC m=+1126.493852721"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.104408 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" event={"ID":"ec47f682-ce44-4e37-980b-a5e1c1142284","Type":"ContainerStarted","Data":"f6e120e5090d34be9524d3ee4083c30a387d33f9320951e22c1399c8e7e09718"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.105367 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.107660 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.118738 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" event={"ID":"395c93a8-4649-4ddf-b630-d9982c670991","Type":"ContainerStarted","Data":"280833ae7fb489b43f2b28968990921618451d9d4399ff3bf6f46b7d1ee6bb0a"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.119181 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.122569 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" podStartSLOduration=35.12255494 podStartE2EDuration="35.12255494s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:50:03.115520589 +0000 UTC m=+1126.529342477" watchObservedRunningTime="2025-12-03 16:50:03.12255494 +0000 UTC m=+1126.536376828"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.132886 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" event={"ID":"4b55dbea-68c7-4290-a698-068c741b22a6","Type":"ContainerStarted","Data":"311aa79466f20273ed13ed20f570befb4dd05709ab0e632b504d177a6dc2ce2d"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.140379 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" event={"ID":"a54f31fa-c7c6-48b0-90b9-df614b116b8c","Type":"ContainerStarted","Data":"a0e64507c3a8d863c3293ee6b3bdbf0a1d1c1bbb292e0f01d15c9ae3720f1bcb"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.141612 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.150447 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7" podStartSLOduration=4.086315601 podStartE2EDuration="35.150427838s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.235826329 +0000 UTC m=+1093.649648227" lastFinishedPulling="2025-12-03 16:50:01.299938576 +0000 UTC m=+1124.713760464" observedRunningTime="2025-12-03 16:50:03.148455704 +0000 UTC m=+1126.562277602" watchObservedRunningTime="2025-12-03 16:50:03.150427838 +0000 UTC m=+1126.564249726"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.158927 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" event={"ID":"0e786251-b94d-47b4-930f-5f2cac19cc52","Type":"ContainerStarted","Data":"14c48af584ba5e33b71a24b9d88274125d0ce02ebd98cf91cf1adc290d2820ac"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.159644 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.170181 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.174092 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.185176 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" event={"ID":"97caeb4d-2742-41fb-ac4f-440555163d81","Type":"ContainerStarted","Data":"4d21b53d5bcfe4ad1df8aa53f31a4c6f389d18c5d3acb6283b5756ae4b30e4cf"}
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.185231 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.187917 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.255550 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" podStartSLOduration=4.925282293 podStartE2EDuration="36.255531296s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.940901491 +0000 UTC m=+1093.354723379" lastFinishedPulling="2025-12-03 16:50:01.271150494 +0000 UTC m=+1124.684972382" observedRunningTime="2025-12-03 16:50:03.255212708 +0000 UTC m=+1126.669034596" watchObservedRunningTime="2025-12-03 16:50:03.255531296 +0000 UTC m=+1126.669353184"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.259339 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7" podStartSLOduration=2.768107918 podStartE2EDuration="35.259323869s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.07390749 +0000 UTC m=+1093.487729378" lastFinishedPulling="2025-12-03 16:50:02.565123441 +0000 UTC m=+1125.978945329" observedRunningTime="2025-12-03 16:50:03.232820059 +0000 UTC m=+1126.646641947" watchObservedRunningTime="2025-12-03 16:50:03.259323869 +0000 UTC m=+1126.673145757"
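The "m=+1126.646641947"-style suffixes throughout these timestamps are Go monotonic clock readings, which time.Time's String method appends whenever a reading is present; that is standard library behavior, shown below:

// The "m=+…" suffix in the timestamps above is Go's monotonic clock reading.
package main

import (
	"fmt"
	"time"
)

func main() {
	t := time.Now()
	fmt.Println(t)             // String() appends an "m=+0.000…" monotonic suffix
	fmt.Println(t.Round(0))    // Round(0) strips the monotonic reading
	fmt.Println(time.Since(t)) // durations between readings use the monotonic clock
}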
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.284627 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-8hlz6" podStartSLOduration=3.882511399 podStartE2EDuration="36.284608357s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.749866058 +0000 UTC m=+1093.163687946" lastFinishedPulling="2025-12-03 16:50:02.151963016 +0000 UTC m=+1125.565784904" observedRunningTime="2025-12-03 16:50:03.282115869 +0000 UTC m=+1126.695937757" watchObservedRunningTime="2025-12-03 16:50:03.284608357 +0000 UTC m=+1126.698430245"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.360602 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-s6d9w" podStartSLOduration=4.36297633 podStartE2EDuration="36.360582633s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.91668279 +0000 UTC m=+1093.330504678" lastFinishedPulling="2025-12-03 16:50:01.914289093 +0000 UTC m=+1125.328110981" observedRunningTime="2025-12-03 16:50:03.31413498 +0000 UTC m=+1126.727956908" watchObservedRunningTime="2025-12-03 16:50:03.360582633 +0000 UTC m=+1126.774404521"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.405525 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cj9f5" podStartSLOduration=5.1539833250000004 podStartE2EDuration="36.405506224s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.076117661 +0000 UTC m=+1093.489939549" lastFinishedPulling="2025-12-03 16:50:01.32764056 +0000 UTC m=+1124.741462448" observedRunningTime="2025-12-03 16:50:03.400335643 +0000 UTC m=+1126.814157531" watchObservedRunningTime="2025-12-03 16:50:03.405506224 +0000 UTC m=+1126.819328112"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.482986 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rm2fw" podStartSLOduration=5.058370308 podStartE2EDuration="36.48296517s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.904035195 +0000 UTC m=+1093.317857083" lastFinishedPulling="2025-12-03 16:50:01.328630057 +0000 UTC m=+1124.742451945" observedRunningTime="2025-12-03 16:50:03.475714293 +0000 UTC m=+1126.889536181" watchObservedRunningTime="2025-12-03 16:50:03.48296517 +0000 UTC m=+1126.896787058"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.483112 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-jbfdk" podStartSLOduration=3.575075297 podStartE2EDuration="35.483107314s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.224789298 +0000 UTC m=+1093.638611176" lastFinishedPulling="2025-12-03 16:50:02.132821305 +0000 UTC m=+1125.546643193" observedRunningTime="2025-12-03 16:50:03.433152796 +0000 UTC m=+1126.846974684" watchObservedRunningTime="2025-12-03 16:50:03.483107314 +0000 UTC m=+1126.896929202"
Dec 03 16:50:03 crc kubenswrapper[5002]: I1203 16:50:03.521291 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk" podStartSLOduration=5.951997567 podStartE2EDuration="35.521273703s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:49:30.251960299 +0000 UTC m=+1093.665782187" lastFinishedPulling="2025-12-03 16:49:59.821236425 +0000 UTC m=+1123.235058323" observedRunningTime="2025-12-03 16:50:03.511250049 +0000 UTC m=+1126.925071937" watchObservedRunningTime="2025-12-03 16:50:03.521273703 +0000 UTC m=+1126.935095591"
Dec 03 16:50:04 crc kubenswrapper[5002]: I1203 16:50:04.196623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" event={"ID":"4b55dbea-68c7-4290-a698-068c741b22a6","Type":"ContainerStarted","Data":"e9b78f3b96ae56bb8479b36b0270bab859bcbc572afe495a76fb936a1b34eb06"}
Dec 03 16:50:04 crc kubenswrapper[5002]: I1203 16:50:04.223566 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl" podStartSLOduration=3.288660096 podStartE2EDuration="37.223511368s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:49:29.702118876 +0000 UTC m=+1093.115940764" lastFinishedPulling="2025-12-03 16:50:03.636970158 +0000 UTC m=+1127.050792036" observedRunningTime="2025-12-03 16:50:04.222592693 +0000 UTC m=+1127.636414601" watchObservedRunningTime="2025-12-03 16:50:04.223511368 +0000 UTC m=+1127.637333256"
Dec 03 16:50:05 crc kubenswrapper[5002]: I1203 16:50:05.208043 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl"
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.216162 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" event={"ID":"127b2eae-6b24-45a7-871b-e9569b062e28","Type":"ContainerStarted","Data":"0aee83eb325ecbaf8971708567cf5b0e30716d899a3446a9324beb41dd6fe62b"}
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.216224 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" event={"ID":"127b2eae-6b24-45a7-871b-e9569b062e28","Type":"ContainerStarted","Data":"46fe24d19a1068ba1225b21539cf58ad7dbf496a195bccd58a185f15584082d0"}
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.216321 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk"
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.220330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" event={"ID":"c23347dc-e104-4ca5-a132-60a102150117","Type":"ContainerStarted","Data":"8822e4523507cee49eaedd01e53fb04f3dcffb400bf3f0df1117d328b40c6b50"}
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.220367 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" event={"ID":"c23347dc-e104-4ca5-a132-60a102150117","Type":"ContainerStarted","Data":"d8aa1115c8189ae505e4f2b9ef9aa71a011423dfa4ec6e23add9181e60fd8162"}
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.267308 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" podStartSLOduration=33.878774608 podStartE2EDuration="38.267276396s" podCreationTimestamp="2025-12-03 16:49:28 +0000 UTC" firstStartedPulling="2025-12-03 16:50:01.177946769 +0000 UTC m=+1124.591768657" lastFinishedPulling="2025-12-03 16:50:05.566448557 +0000 UTC m=+1128.980270445" observedRunningTime="2025-12-03 16:50:06.260297196 +0000 UTC m=+1129.674119104" watchObservedRunningTime="2025-12-03 16:50:06.267276396 +0000 UTC m=+1129.681098294"
Dec 03 16:50:06 crc kubenswrapper[5002]: I1203 16:50:06.285641 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f" podStartSLOduration=35.011526177 podStartE2EDuration="39.285613165s" podCreationTimestamp="2025-12-03 16:49:27 +0000 UTC" firstStartedPulling="2025-12-03 16:50:01.299880395 +0000 UTC m=+1124.713702283" lastFinishedPulling="2025-12-03 16:50:05.573967383 +0000 UTC m=+1128.987789271" observedRunningTime="2025-12-03 16:50:06.283539878 +0000 UTC m=+1129.697361776" watchObservedRunningTime="2025-12-03 16:50:06.285613165 +0000 UTC m=+1129.699435053"
Dec 03 16:50:07 crc kubenswrapper[5002]: I1203 16:50:07.228243 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.492608 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bfqzl"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.622500 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-gp7hz"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.701827 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ngjzc"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.878924 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-bwfpx"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.904319 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-lfdk7"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.953930 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-4rqsk"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.984652 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w2vkv"
Dec 03 16:50:08 crc kubenswrapper[5002]: I1203 16:50:08.997034 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8jrq7"
Dec 03 16:50:10 crc kubenswrapper[5002]: I1203 16:50:10.052315 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-95x4f"
Dec 03 16:50:10 crc kubenswrapper[5002]: I1203 16:50:10.502555 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready"
pod="openstack-operators/openstack-baremetal-operator-controller-manager-55d86b6686z66jk" Dec 03 16:50:10 crc kubenswrapper[5002]: I1203 16:50:10.983617 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-9f56fc979-b5vdg" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.290359 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-2lrvn"] Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.292107 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.300425 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-hcvw5" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.300577 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.305733 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.305956 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.315735 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-2lrvn"] Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.388778 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-config\") pod \"dnsmasq-dns-5cd484bb89-2lrvn\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.388859 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhbg6\" (UniqueName: \"kubernetes.io/projected/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-kube-api-access-nhbg6\") pod \"dnsmasq-dns-5cd484bb89-2lrvn\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.449236 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-567c455747-lrpch"] Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.450532 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.454701 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.474183 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-567c455747-lrpch"] Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.490277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhbg6\" (UniqueName: \"kubernetes.io/projected/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-kube-api-access-nhbg6\") pod \"dnsmasq-dns-5cd484bb89-2lrvn\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.490389 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-config\") pod \"dnsmasq-dns-5cd484bb89-2lrvn\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.491260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-config\") pod \"dnsmasq-dns-5cd484bb89-2lrvn\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.511639 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhbg6\" (UniqueName: \"kubernetes.io/projected/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-kube-api-access-nhbg6\") pod \"dnsmasq-dns-5cd484bb89-2lrvn\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.592195 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-config\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.592853 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7m8h\" (UniqueName: \"kubernetes.io/projected/2c6b10bf-1037-43b9-9327-e2095b2b72f8-kube-api-access-b7m8h\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.592894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-dns-svc\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.627973 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.695660 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7m8h\" (UniqueName: \"kubernetes.io/projected/2c6b10bf-1037-43b9-9327-e2095b2b72f8-kube-api-access-b7m8h\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.695724 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-dns-svc\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.695810 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-config\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.696720 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-config\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.696966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-dns-svc\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.714658 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7m8h\" (UniqueName: \"kubernetes.io/projected/2c6b10bf-1037-43b9-9327-e2095b2b72f8-kube-api-access-b7m8h\") pod \"dnsmasq-dns-567c455747-lrpch\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:26 crc kubenswrapper[5002]: I1203 16:50:26.767435 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:27 crc kubenswrapper[5002]: I1203 16:50:27.014936 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-567c455747-lrpch"] Dec 03 16:50:27 crc kubenswrapper[5002]: W1203 16:50:27.023683 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c6b10bf_1037_43b9_9327_e2095b2b72f8.slice/crio-230021866fdc7c59d17d11d1d87ceab87d60adbf52d88ab7341a1ae53bd9e8eb WatchSource:0}: Error finding container 230021866fdc7c59d17d11d1d87ceab87d60adbf52d88ab7341a1ae53bd9e8eb: Status 404 returned error can't find the container with id 230021866fdc7c59d17d11d1d87ceab87d60adbf52d88ab7341a1ae53bd9e8eb Dec 03 16:50:27 crc kubenswrapper[5002]: I1203 16:50:27.115354 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-2lrvn"] Dec 03 16:50:27 crc kubenswrapper[5002]: W1203 16:50:27.125556 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49d1ed1f_3f24_4e15_8f32_da33a9f03b2c.slice/crio-b60f72a34e723804e1b5d1e6089dde4fce8bc85c2bca7b0fd727432a632c3a19 WatchSource:0}: Error finding container b60f72a34e723804e1b5d1e6089dde4fce8bc85c2bca7b0fd727432a632c3a19: Status 404 returned error can't find the container with id b60f72a34e723804e1b5d1e6089dde4fce8bc85c2bca7b0fd727432a632c3a19 Dec 03 16:50:27 crc kubenswrapper[5002]: I1203 16:50:27.406967 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" event={"ID":"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c","Type":"ContainerStarted","Data":"b60f72a34e723804e1b5d1e6089dde4fce8bc85c2bca7b0fd727432a632c3a19"} Dec 03 16:50:27 crc kubenswrapper[5002]: I1203 16:50:27.408699 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-567c455747-lrpch" event={"ID":"2c6b10bf-1037-43b9-9327-e2095b2b72f8","Type":"ContainerStarted","Data":"230021866fdc7c59d17d11d1d87ceab87d60adbf52d88ab7341a1ae53bd9e8eb"} Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.209858 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-567c455747-lrpch"] Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.239830 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-dn4zf"] Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.241635 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.252911 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-dn4zf"] Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.354339 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-config\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.354441 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvvqb\" (UniqueName: \"kubernetes.io/projected/8cfe7179-2161-4aac-b4c4-cdeef9baed34-kube-api-access-pvvqb\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.354485 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-dns-svc\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.455999 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvvqb\" (UniqueName: \"kubernetes.io/projected/8cfe7179-2161-4aac-b4c4-cdeef9baed34-kube-api-access-pvvqb\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.456071 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-dns-svc\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.456202 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-config\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.457519 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-config\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.458540 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-dns-svc\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.503910 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvvqb\" (UniqueName: 
\"kubernetes.io/projected/8cfe7179-2161-4aac-b4c4-cdeef9baed34-kube-api-access-pvvqb\") pod \"dnsmasq-dns-bc4b48fc9-dn4zf\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.588566 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.608045 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-2lrvn"] Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.622449 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb666b895-qmpf6"] Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.623949 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.649459 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-qmpf6"] Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.767058 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-config\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.767118 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-dns-svc\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.767187 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8n5f\" (UniqueName: \"kubernetes.io/projected/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-kube-api-access-q8n5f\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.868562 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-config\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.868628 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-dns-svc\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.868723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8n5f\" (UniqueName: \"kubernetes.io/projected/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-kube-api-access-q8n5f\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.869616 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-config\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.869845 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-dns-svc\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:29 crc kubenswrapper[5002]: I1203 16:50:29.896080 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8n5f\" (UniqueName: \"kubernetes.io/projected/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-kube-api-access-q8n5f\") pod \"dnsmasq-dns-cb666b895-qmpf6\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") " pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.023289 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.225144 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-dn4zf"] Dec 03 16:50:30 crc kubenswrapper[5002]: W1203 16:50:30.235778 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cfe7179_2161_4aac_b4c4_cdeef9baed34.slice/crio-153cc734b75a3805e74cb826c9c8a75f4750474160a209c158326b6949e9b43b WatchSource:0}: Error finding container 153cc734b75a3805e74cb826c9c8a75f4750474160a209c158326b6949e9b43b: Status 404 returned error can't find the container with id 153cc734b75a3805e74cb826c9c8a75f4750474160a209c158326b6949e9b43b Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.407965 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.418156 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.421134 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-w24m2" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.421833 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.422021 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.422110 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.422174 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.422030 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.423584 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.424781 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.467839 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" event={"ID":"8cfe7179-2161-4aac-b4c4-cdeef9baed34","Type":"ContainerStarted","Data":"153cc734b75a3805e74cb826c9c8a75f4750474160a209c158326b6949e9b43b"} Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.550833 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-qmpf6"] Dec 03 16:50:30 crc kubenswrapper[5002]: W1203 16:50:30.573308 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0c2ebd9_6979_42b4_9537_267e2a36a8c7.slice/crio-9575cd8bf32c7d556d1a35f1d170584748941163b1287420cb7a067c5b384c78 WatchSource:0}: Error finding container 9575cd8bf32c7d556d1a35f1d170584748941163b1287420cb7a067c5b384c78: Status 404 returned error can't find the container with id 9575cd8bf32c7d556d1a35f1d170584748941163b1287420cb7a067c5b384c78 Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589467 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589531 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589556 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 
16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589585 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-server-conf\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589611 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz7gh\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-kube-api-access-tz7gh\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589705 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/382d6556-c45b-43dd-a4fa-16b3e91e0725-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589734 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.589967 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/382d6556-c45b-43dd-a4fa-16b3e91e0725-pod-info\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.590141 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.590294 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.590372 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.695865 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.695935 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696011 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-server-conf\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696091 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz7gh\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-kube-api-access-tz7gh\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696126 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/382d6556-c45b-43dd-a4fa-16b3e91e0725-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696162 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696195 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/382d6556-c45b-43dd-a4fa-16b3e91e0725-pod-info\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696236 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696280 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.696302 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") 
" pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.697049 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.697467 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.698663 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.699529 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.700223 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-server-conf\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.703159 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/382d6556-c45b-43dd-a4fa-16b3e91e0725-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.703593 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.704273 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/382d6556-c45b-43dd-a4fa-16b3e91e0725-pod-info\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.705497 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.720486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-confd\") pod 
\"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.720736 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz7gh\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-kube-api-access-tz7gh\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.736514 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.760964 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.789119 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.816248 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.816443 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.824994 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.825245 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.825515 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.826913 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.828261 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.828435 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.835476 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jvjp9" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.911936 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7a598f1-0f32-448c-b08f-b5b0e70f583d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912016 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7a598f1-0f32-448c-b08f-b5b0e70f583d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 
16:50:30.912048 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912074 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912124 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912174 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912297 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912342 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn68v\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-kube-api-access-bn68v\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:30 crc kubenswrapper[5002]: I1203 16:50:30.912432 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc 
kubenswrapper[5002]: I1203 16:50:31.013760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn68v\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-kube-api-access-bn68v\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013813 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013856 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7a598f1-0f32-448c-b08f-b5b0e70f583d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013903 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7a598f1-0f32-448c-b08f-b5b0e70f583d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013921 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013941 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.013975 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.014003 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.014034 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.014055 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.014707 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.015655 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.015776 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.017321 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.017553 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.018274 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.021766 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.031949 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.031974 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7a598f1-0f32-448c-b08f-b5b0e70f583d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.036398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7a598f1-0f32-448c-b08f-b5b0e70f583d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.040761 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn68v\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-kube-api-access-bn68v\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.069122 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.159787 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.370732 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.482786 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" event={"ID":"f0c2ebd9-6979-42b4-9537-267e2a36a8c7","Type":"ContainerStarted","Data":"9575cd8bf32c7d556d1a35f1d170584748941163b1287420cb7a067c5b384c78"} Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.484414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"382d6556-c45b-43dd-a4fa-16b3e91e0725","Type":"ContainerStarted","Data":"d2f08efd4e97d1e3035b6d1be2e152b6fe41a7383fa709fefc5c3a2004dfbffd"} Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.671925 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:50:31 crc kubenswrapper[5002]: W1203 16:50:31.691711 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7a598f1_0f32_448c_b08f_b5b0e70f583d.slice/crio-d07315ea7de4f54cb5c2b58d32056f7b796e0c7d326364b64e451ae7e2db2f19 WatchSource:0}: Error finding container d07315ea7de4f54cb5c2b58d32056f7b796e0c7d326364b64e451ae7e2db2f19: Status 404 returned error can't find the container with id d07315ea7de4f54cb5c2b58d32056f7b796e0c7d326364b64e451ae7e2db2f19 Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.972803 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.974500 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.978895 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.984975 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-4ch84" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.985647 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.985929 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.994740 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:50:31 crc kubenswrapper[5002]: I1203 16:50:31.997221 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.033425 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.033983 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-default\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.034017 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-operator-scripts\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.034048 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-kolla-config\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.034076 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-generated\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.034114 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.034139 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-sj2kv\" (UniqueName: \"kubernetes.io/projected/60743dc4-9a30-4fd2-80c1-0c7427241e92-kube-api-access-sj2kv\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.034602 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.135832 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.135896 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.135924 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-default\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.135950 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-operator-scripts\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.135979 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-kolla-config\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.135999 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-generated\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.136021 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.136039 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj2kv\" (UniqueName: \"kubernetes.io/projected/60743dc4-9a30-4fd2-80c1-0c7427241e92-kube-api-access-sj2kv\") pod \"openstack-galera-0\" (UID: 
\"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.136831 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.137056 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-generated\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.137813 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-kolla-config\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.137895 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-default\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.139259 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-operator-scripts\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.144482 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.144775 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.163248 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.164243 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj2kv\" (UniqueName: \"kubernetes.io/projected/60743dc4-9a30-4fd2-80c1-0c7427241e92-kube-api-access-sj2kv\") pod \"openstack-galera-0\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.307923 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.498216 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7a598f1-0f32-448c-b08f-b5b0e70f583d","Type":"ContainerStarted","Data":"d07315ea7de4f54cb5c2b58d32056f7b796e0c7d326364b64e451ae7e2db2f19"} Dec 03 16:50:32 crc kubenswrapper[5002]: I1203 16:50:32.555056 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.334224 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.337057 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.340708 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.340868 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-wx97g" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.352585 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.368959 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.379793 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.507158 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"60743dc4-9a30-4fd2-80c1-0c7427241e92","Type":"ContainerStarted","Data":"12111a4a3befceccbaf0cdd0c492b228d6afde8467a782aedb5f6fb3a1562a1d"} Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.529287 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.529359 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.529445 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvc54\" (UniqueName: \"kubernetes.io/projected/b132eed4-cb4d-4abc-b49a-55688686288d-kube-api-access-nvc54\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.529844 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.529918 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.529964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.530032 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.530054 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632155 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632222 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632255 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632292 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632319 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: 
\"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632357 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632391 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvc54\" (UniqueName: \"kubernetes.io/projected/b132eed4-cb4d-4abc-b49a-55688686288d-kube-api-access-nvc54\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632662 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.632892 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.633422 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.633804 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.635009 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.640250 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc 
kubenswrapper[5002]: I1203 16:50:33.648955 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.656536 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.665386 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvc54\" (UniqueName: \"kubernetes.io/projected/b132eed4-cb4d-4abc-b49a-55688686288d-kube-api-access-nvc54\") pod \"openstack-cell1-galera-0\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.693240 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.701854 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.711888 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.719684 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.720088 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.720804 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-5m4hr" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.736327 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.835948 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-kolla-config\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.836023 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw29s\" (UniqueName: \"kubernetes.io/projected/852cfff4-0855-40ab-a82d-b560c37118bf-kube-api-access-qw29s\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.836053 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-config-data\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.836161 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.836219 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.937985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.938096 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.938180 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-kolla-config\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.938223 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw29s\" (UniqueName: \"kubernetes.io/projected/852cfff4-0855-40ab-a82d-b560c37118bf-kube-api-access-qw29s\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.938255 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-config-data\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.943916 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:33 crc kubenswrapper[5002]: I1203 16:50:33.944631 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.191281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.300801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-kolla-config\") pod \"memcached-0\" (UID: 
\"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.301768 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-config-data\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.307825 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw29s\" (UniqueName: \"kubernetes.io/projected/852cfff4-0855-40ab-a82d-b560c37118bf-kube-api-access-qw29s\") pod \"memcached-0\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " pod="openstack/memcached-0" Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.392997 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.531199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b132eed4-cb4d-4abc-b49a-55688686288d","Type":"ContainerStarted","Data":"952dc21f12f66b37c4abc6cfb2a7cbd91ac9fe4cf84948f1a14010e80be6ae18"} Dec 03 16:50:34 crc kubenswrapper[5002]: I1203 16:50:34.921505 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.473351 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.475184 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.479428 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-h74ml" Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.482130 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.542588 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"852cfff4-0855-40ab-a82d-b560c37118bf","Type":"ContainerStarted","Data":"140dc0384c83fe9ebfee94174d975f61001d65a677e4fd2c13ff06e0213532ce"} Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.574428 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64lnz\" (UniqueName: \"kubernetes.io/projected/84ace4a6-f88f-4b88-8bd9-62440a00df18-kube-api-access-64lnz\") pod \"kube-state-metrics-0\" (UID: \"84ace4a6-f88f-4b88-8bd9-62440a00df18\") " pod="openstack/kube-state-metrics-0" Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.676498 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64lnz\" (UniqueName: \"kubernetes.io/projected/84ace4a6-f88f-4b88-8bd9-62440a00df18-kube-api-access-64lnz\") pod \"kube-state-metrics-0\" (UID: \"84ace4a6-f88f-4b88-8bd9-62440a00df18\") " pod="openstack/kube-state-metrics-0" Dec 03 16:50:35 crc kubenswrapper[5002]: I1203 16:50:35.700398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64lnz\" (UniqueName: \"kubernetes.io/projected/84ace4a6-f88f-4b88-8bd9-62440a00df18-kube-api-access-64lnz\") pod \"kube-state-metrics-0\" (UID: \"84ace4a6-f88f-4b88-8bd9-62440a00df18\") " pod="openstack/kube-state-metrics-0" Dec 03 16:50:35 crc 
kubenswrapper[5002]: I1203 16:50:35.796577 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.811012 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hnkdk"] Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.812644 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.815966 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.816739 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.817097 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-tw68h" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.820859 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hnkdk"] Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.853114 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-log-ovn\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.854194 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e876c11-14f1-4e51-90a1-e2cdddc08c87-scripts\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.854267 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.854836 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run-ovn\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.854914 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-combined-ca-bundle\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.855001 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-ovn-controller-tls-certs\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 
16:50:38.855067 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l8ll\" (UniqueName: \"kubernetes.io/projected/2e876c11-14f1-4e51-90a1-e2cdddc08c87-kube-api-access-5l8ll\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.903282 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-jkwrt"] Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.906448 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.920361 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jkwrt"] Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957310 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5cc28df-be84-4c87-b0fc-a523c5a23395-scripts\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957385 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-log-ovn\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957444 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-log\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957480 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e876c11-14f1-4e51-90a1-e2cdddc08c87-scripts\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957506 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-lib\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957580 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctrfl\" (UniqueName: \"kubernetes.io/projected/f5cc28df-be84-4c87-b0fc-a523c5a23395-kube-api-access-ctrfl\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: 
I1203 16:50:38.957613 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-run\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957643 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run-ovn\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957681 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-combined-ca-bundle\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957710 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-ovn-controller-tls-certs\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957758 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-etc-ovs\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.957790 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l8ll\" (UniqueName: \"kubernetes.io/projected/2e876c11-14f1-4e51-90a1-e2cdddc08c87-kube-api-access-5l8ll\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.959107 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-log-ovn\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.961929 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e876c11-14f1-4e51-90a1-e2cdddc08c87-scripts\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.962117 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.962387 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run-ovn\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.969842 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-combined-ca-bundle\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.969877 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-ovn-controller-tls-certs\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:38 crc kubenswrapper[5002]: I1203 16:50:38.980967 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l8ll\" (UniqueName: \"kubernetes.io/projected/2e876c11-14f1-4e51-90a1-e2cdddc08c87-kube-api-access-5l8ll\") pod \"ovn-controller-hnkdk\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.059453 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-log\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060196 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-lib\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060493 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-run\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060574 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctrfl\" (UniqueName: \"kubernetes.io/projected/f5cc28df-be84-4c87-b0fc-a523c5a23395-kube-api-access-ctrfl\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-etc-ovs\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5cc28df-be84-4c87-b0fc-a523c5a23395-scripts\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" 
Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060442 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-lib\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.060129 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-log\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.061117 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-run\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.061399 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-etc-ovs\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.063858 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5cc28df-be84-4c87-b0fc-a523c5a23395-scripts\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.084258 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctrfl\" (UniqueName: \"kubernetes.io/projected/f5cc28df-be84-4c87-b0fc-a523c5a23395-kube-api-access-ctrfl\") pod \"ovn-controller-ovs-jkwrt\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.146045 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hnkdk" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.230575 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.648884 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.651353 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.656937 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.657171 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.657348 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.657674 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.657866 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-2xnv6" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.685644 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.772729 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.772804 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.773020 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-config\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.773114 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.773189 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.773217 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gjnr\" (UniqueName: \"kubernetes.io/projected/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-kube-api-access-8gjnr\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.773247 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.773437 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875348 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875415 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875463 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-config\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875494 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875553 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875581 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gjnr\" (UniqueName: \"kubernetes.io/projected/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-kube-api-access-8gjnr\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875609 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.875837 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 
16:50:39.876096 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.876578 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.876839 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-config\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.878285 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.881120 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.883260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.891446 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.895830 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gjnr\" (UniqueName: \"kubernetes.io/projected/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-kube-api-access-8gjnr\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.903159 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:39 crc kubenswrapper[5002]: I1203 16:50:39.974642 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.810563 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.813173 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.816731 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.816973 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.817166 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-sjs2l" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.817299 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.853105 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.940840 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.940887 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.940938 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-config\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.940961 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.941000 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfcv5\" (UniqueName: \"kubernetes.io/projected/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-kube-api-access-bfcv5\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.941334 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc 
kubenswrapper[5002]: I1203 16:50:42.941371 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:42 crc kubenswrapper[5002]: I1203 16:50:42.941392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043308 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfcv5\" (UniqueName: \"kubernetes.io/projected/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-kube-api-access-bfcv5\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043428 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043463 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043487 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043533 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043555 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-config\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.043884 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.044482 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.045007 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.046782 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-config\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.053570 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.061786 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.062698 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.063502 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfcv5\" (UniqueName: \"kubernetes.io/projected/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-kube-api-access-bfcv5\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.078341 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:43 crc kubenswrapper[5002]: I1203 16:50:43.139409 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 16:50:45 crc kubenswrapper[5002]: I1203 16:50:45.556020 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:50:54 crc kubenswrapper[5002]: I1203 16:50:54.743016 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2f69c54d-bd52-413b-86b6-6b5c4ca765ba","Type":"ContainerStarted","Data":"fb81bfa6aa5abf78d199d7502ff93ca2c296eb1b7c137e553c06af9dc461679d"} Dec 03 16:50:54 crc kubenswrapper[5002]: E1203 16:50:54.758417 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792" Dec 03 16:50:54 crc kubenswrapper[5002]: E1203 16:50:54.758658 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nhbg6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5cd484bb89-2lrvn_openstack(49d1ed1f-3f24-4e15-8f32-da33a9f03b2c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:50:54 crc kubenswrapper[5002]: E1203 16:50:54.759919 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" podUID="49d1ed1f-3f24-4e15-8f32-da33a9f03b2c" Dec 03 16:50:54 crc kubenswrapper[5002]: E1203 
16:50:54.825822 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792" Dec 03 16:50:54 crc kubenswrapper[5002]: E1203 16:50:54.826083 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b7m8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-567c455747-lrpch_openstack(2c6b10bf-1037-43b9-9327-e2095b2b72f8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:50:54 crc kubenswrapper[5002]: E1203 16:50:54.827330 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-567c455747-lrpch" podUID="2c6b10bf-1037-43b9-9327-e2095b2b72f8" Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.388379 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:50:55 crc kubenswrapper[5002]: W1203 16:50:55.407667 5002 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84ace4a6_f88f_4b88_8bd9_62440a00df18.slice/crio-15aee18692511b6d9da9d6b3d885f9aa3bdd1bb35685566bd8c1890838ac53b6 WatchSource:0}: Error finding container 15aee18692511b6d9da9d6b3d885f9aa3bdd1bb35685566bd8c1890838ac53b6: Status 404 returned error can't find the container with id 15aee18692511b6d9da9d6b3d885f9aa3bdd1bb35685566bd8c1890838ac53b6 Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.483612 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hnkdk"] Dec 03 16:50:55 crc kubenswrapper[5002]: W1203 16:50:55.517489 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e876c11_14f1_4e51_90a1_e2cdddc08c87.slice/crio-5e65b7c3398269db4275ffac45374421277a1470bce66ee06a8533a0f24586b4 WatchSource:0}: Error finding container 5e65b7c3398269db4275ffac45374421277a1470bce66ee06a8533a0f24586b4: Status 404 returned error can't find the container with id 5e65b7c3398269db4275ffac45374421277a1470bce66ee06a8533a0f24586b4 Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.600937 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jkwrt"] Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.673424 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.754363 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk" event={"ID":"2e876c11-14f1-4e51-90a1-e2cdddc08c87","Type":"ContainerStarted","Data":"5e65b7c3398269db4275ffac45374421277a1470bce66ee06a8533a0f24586b4"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.756800 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"60743dc4-9a30-4fd2-80c1-0c7427241e92","Type":"ContainerStarted","Data":"de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.760365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b132eed4-cb4d-4abc-b49a-55688686288d","Type":"ContainerStarted","Data":"c1428bf30cb15b69700ca7aee245af99d1b61186e96d67ee9c68f2ca7bb320c7"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.763825 5002 generic.go:334] "Generic (PLEG): container finished" podID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerID="1f6316af561546b5f9f106108beeac82fb9451cdb2bf90d4db146bd24fff176a" exitCode=0 Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.763903 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" event={"ID":"8cfe7179-2161-4aac-b4c4-cdeef9baed34","Type":"ContainerDied","Data":"1f6316af561546b5f9f106108beeac82fb9451cdb2bf90d4db146bd24fff176a"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.765605 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"852cfff4-0855-40ab-a82d-b560c37118bf","Type":"ContainerStarted","Data":"2ba9a566a71354e8ef6bda7b9341c58e4f1b4fb5649b671ff2b3c91f0cb5d63d"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.766188 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.767195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" 
event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerStarted","Data":"8fb0fd59b2e0c33c594e3ce22470dc3a2f2b885cdcfc07dddb8879cca8b1fd1a"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.768787 5002 generic.go:334] "Generic (PLEG): container finished" podID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerID="ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3" exitCode=0 Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.768827 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" event={"ID":"f0c2ebd9-6979-42b4-9537-267e2a36a8c7","Type":"ContainerDied","Data":"ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.771251 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"84ace4a6-f88f-4b88-8bd9-62440a00df18","Type":"ContainerStarted","Data":"15aee18692511b6d9da9d6b3d885f9aa3bdd1bb35685566bd8c1890838ac53b6"} Dec 03 16:50:55 crc kubenswrapper[5002]: I1203 16:50:55.951688 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.927640737 podStartE2EDuration="22.951661113s" podCreationTimestamp="2025-12-03 16:50:33 +0000 UTC" firstStartedPulling="2025-12-03 16:50:34.912580294 +0000 UTC m=+1158.326402222" lastFinishedPulling="2025-12-03 16:50:54.93660071 +0000 UTC m=+1178.350422598" observedRunningTime="2025-12-03 16:50:55.929012217 +0000 UTC m=+1179.342834135" watchObservedRunningTime="2025-12-03 16:50:55.951661113 +0000 UTC m=+1179.365482991" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.084114 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.132902 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhbg6\" (UniqueName: \"kubernetes.io/projected/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-kube-api-access-nhbg6\") pod \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.132995 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-config\") pod \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\" (UID: \"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c\") " Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.135354 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-config" (OuterVolumeSpecName: "config") pod "49d1ed1f-3f24-4e15-8f32-da33a9f03b2c" (UID: "49d1ed1f-3f24-4e15-8f32-da33a9f03b2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.193478 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.236279 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.242908 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-kube-api-access-nhbg6" (OuterVolumeSpecName: "kube-api-access-nhbg6") pod "49d1ed1f-3f24-4e15-8f32-da33a9f03b2c" (UID: "49d1ed1f-3f24-4e15-8f32-da33a9f03b2c"). InnerVolumeSpecName "kube-api-access-nhbg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.338156 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-config\") pod \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.338446 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-dns-svc\") pod \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.338536 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7m8h\" (UniqueName: \"kubernetes.io/projected/2c6b10bf-1037-43b9-9327-e2095b2b72f8-kube-api-access-b7m8h\") pod \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\" (UID: \"2c6b10bf-1037-43b9-9327-e2095b2b72f8\") " Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.338914 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-config" (OuterVolumeSpecName: "config") pod "2c6b10bf-1037-43b9-9327-e2095b2b72f8" (UID: "2c6b10bf-1037-43b9-9327-e2095b2b72f8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.338987 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhbg6\" (UniqueName: \"kubernetes.io/projected/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c-kube-api-access-nhbg6\") on node \"crc\" DevicePath \"\"" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.340992 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c6b10bf-1037-43b9-9327-e2095b2b72f8" (UID: "2c6b10bf-1037-43b9-9327-e2095b2b72f8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.343675 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c6b10bf-1037-43b9-9327-e2095b2b72f8-kube-api-access-b7m8h" (OuterVolumeSpecName: "kube-api-access-b7m8h") pod "2c6b10bf-1037-43b9-9327-e2095b2b72f8" (UID: "2c6b10bf-1037-43b9-9327-e2095b2b72f8"). InnerVolumeSpecName "kube-api-access-b7m8h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.441339 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.441383 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6b10bf-1037-43b9-9327-e2095b2b72f8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.441396 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7m8h\" (UniqueName: \"kubernetes.io/projected/2c6b10bf-1037-43b9-9327-e2095b2b72f8-kube-api-access-b7m8h\") on node \"crc\" DevicePath \"\"" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.781200 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" event={"ID":"49d1ed1f-3f24-4e15-8f32-da33a9f03b2c","Type":"ContainerDied","Data":"b60f72a34e723804e1b5d1e6089dde4fce8bc85c2bca7b0fd727432a632c3a19"} Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.781269 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd484bb89-2lrvn" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.784145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-567c455747-lrpch" event={"ID":"2c6b10bf-1037-43b9-9327-e2095b2b72f8","Type":"ContainerDied","Data":"230021866fdc7c59d17d11d1d87ceab87d60adbf52d88ab7341a1ae53bd9e8eb"} Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.784168 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-567c455747-lrpch" Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.817598 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9eacefa0-a1f4-4181-ab8e-201efd0fc67e","Type":"ContainerStarted","Data":"93f6e107bbb3c4a0c3a783debd48c067ea7aa8d27f0862ea8122e14c6544a00a"} Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.968964 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-2lrvn"] Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.974248 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cd484bb89-2lrvn"] Dec 03 16:50:56 crc kubenswrapper[5002]: I1203 16:50:56.999273 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-567c455747-lrpch"] Dec 03 16:50:57 crc kubenswrapper[5002]: I1203 16:50:57.005815 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-567c455747-lrpch"] Dec 03 16:50:58 crc kubenswrapper[5002]: I1203 16:50:58.866979 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c6b10bf-1037-43b9-9327-e2095b2b72f8" path="/var/lib/kubelet/pods/2c6b10bf-1037-43b9-9327-e2095b2b72f8/volumes" Dec 03 16:50:58 crc kubenswrapper[5002]: I1203 16:50:58.868786 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49d1ed1f-3f24-4e15-8f32-da33a9f03b2c" path="/var/lib/kubelet/pods/49d1ed1f-3f24-4e15-8f32-da33a9f03b2c/volumes" Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:01.877366 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" 
event={"ID":"8cfe7179-2161-4aac-b4c4-cdeef9baed34","Type":"ContainerStarted","Data":"a2fc0440e9074fc53c20561442d14c83c96843538d576159475c9558f6c647cb"} Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:02.889872 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7a598f1-0f32-448c-b08f-b5b0e70f583d","Type":"ContainerStarted","Data":"5caff8192bc34681b3f0760b1b195bcfacf6add52d3a669e3d1acea1cb2ca939"} Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:02.893508 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"382d6556-c45b-43dd-a4fa-16b3e91e0725","Type":"ContainerStarted","Data":"4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919"} Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:02.902700 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" event={"ID":"f0c2ebd9-6979-42b4-9537-267e2a36a8c7","Type":"ContainerStarted","Data":"97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da"} Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:02.902960 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:02.902996 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:51:02 crc kubenswrapper[5002]: I1203 16:51:02.983958 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" podStartSLOduration=9.58275054 podStartE2EDuration="33.983927828s" podCreationTimestamp="2025-12-03 16:50:29 +0000 UTC" firstStartedPulling="2025-12-03 16:50:30.576881891 +0000 UTC m=+1153.990703779" lastFinishedPulling="2025-12-03 16:50:54.978059179 +0000 UTC m=+1178.391881067" observedRunningTime="2025-12-03 16:51:02.970469802 +0000 UTC m=+1186.384291730" watchObservedRunningTime="2025-12-03 16:51:02.983927828 +0000 UTC m=+1186.397749746" Dec 03 16:51:04 crc kubenswrapper[5002]: I1203 16:51:04.395853 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 16:51:04 crc kubenswrapper[5002]: I1203 16:51:04.421598 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" podStartSLOduration=10.682298644 podStartE2EDuration="35.421571655s" podCreationTimestamp="2025-12-03 16:50:29 +0000 UTC" firstStartedPulling="2025-12-03 16:50:30.239536377 +0000 UTC m=+1153.653358265" lastFinishedPulling="2025-12-03 16:50:54.978809388 +0000 UTC m=+1178.392631276" observedRunningTime="2025-12-03 16:51:03.006174063 +0000 UTC m=+1186.419995961" watchObservedRunningTime="2025-12-03 16:51:04.421571655 +0000 UTC m=+1187.835393543" Dec 03 16:51:05 crc kubenswrapper[5002]: I1203 16:51:05.952757 5002 generic.go:334] "Generic (PLEG): container finished" podID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerID="de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3" exitCode=0 Dec 03 16:51:05 crc kubenswrapper[5002]: I1203 16:51:05.952851 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"60743dc4-9a30-4fd2-80c1-0c7427241e92","Type":"ContainerDied","Data":"de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3"} Dec 03 16:51:05 crc kubenswrapper[5002]: I1203 16:51:05.961613 5002 generic.go:334] "Generic (PLEG): container finished" 
podID="b132eed4-cb4d-4abc-b49a-55688686288d" containerID="c1428bf30cb15b69700ca7aee245af99d1b61186e96d67ee9c68f2ca7bb320c7" exitCode=0 Dec 03 16:51:05 crc kubenswrapper[5002]: I1203 16:51:05.961686 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b132eed4-cb4d-4abc-b49a-55688686288d","Type":"ContainerDied","Data":"c1428bf30cb15b69700ca7aee245af99d1b61186e96d67ee9c68f2ca7bb320c7"} Dec 03 16:51:05 crc kubenswrapper[5002]: I1203 16:51:05.961725 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-dn4zf"] Dec 03 16:51:05 crc kubenswrapper[5002]: I1203 16:51:05.961972 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerName="dnsmasq-dns" containerID="cri-o://a2fc0440e9074fc53c20561442d14c83c96843538d576159475c9558f6c647cb" gracePeriod=10 Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.084359 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66c567d66c-5g2mp"] Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.090067 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.138111 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66c567d66c-5g2mp"] Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.262132 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-config\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.262198 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-dns-svc\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.262227 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsqk2\" (UniqueName: \"kubernetes.io/projected/63caefa0-1cea-4ecc-b8e4-6d579f56413c-kube-api-access-nsqk2\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.364277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-config\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.364361 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-dns-svc\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.364386 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-nsqk2\" (UniqueName: \"kubernetes.io/projected/63caefa0-1cea-4ecc-b8e4-6d579f56413c-kube-api-access-nsqk2\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.365507 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-config\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.365545 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-dns-svc\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.386501 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsqk2\" (UniqueName: \"kubernetes.io/projected/63caefa0-1cea-4ecc-b8e4-6d579f56413c-kube-api-access-nsqk2\") pod \"dnsmasq-dns-66c567d66c-5g2mp\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.416737 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.986368 5002 generic.go:334] "Generic (PLEG): container finished" podID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerID="a2fc0440e9074fc53c20561442d14c83c96843538d576159475c9558f6c647cb" exitCode=0 Dec 03 16:51:06 crc kubenswrapper[5002]: I1203 16:51:06.986464 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" event={"ID":"8cfe7179-2161-4aac-b4c4-cdeef9baed34","Type":"ContainerDied","Data":"a2fc0440e9074fc53c20561442d14c83c96843538d576159475c9558f6c647cb"} Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.156554 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.167502 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.167680 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.173092 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.173384 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tjdlq" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.173532 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.173647 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.176289 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.279821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvvqb\" (UniqueName: \"kubernetes.io/projected/8cfe7179-2161-4aac-b4c4-cdeef9baed34-kube-api-access-pvvqb\") pod \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.280236 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-config\") pod \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.280408 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-dns-svc\") pod \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\" (UID: \"8cfe7179-2161-4aac-b4c4-cdeef9baed34\") " Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.280732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-cache\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.310697 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.310908 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.311031 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-lock\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.311197 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz5kk\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-kube-api-access-lz5kk\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.317527 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cfe7179-2161-4aac-b4c4-cdeef9baed34-kube-api-access-pvvqb" (OuterVolumeSpecName: "kube-api-access-pvvqb") pod "8cfe7179-2161-4aac-b4c4-cdeef9baed34" (UID: "8cfe7179-2161-4aac-b4c4-cdeef9baed34"). InnerVolumeSpecName "kube-api-access-pvvqb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.356947 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-config" (OuterVolumeSpecName: "config") pod "8cfe7179-2161-4aac-b4c4-cdeef9baed34" (UID: "8cfe7179-2161-4aac-b4c4-cdeef9baed34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.395806 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-zcdkn"] Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.396224 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerName="init" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.396240 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerName="init" Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.396269 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerName="dnsmasq-dns" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.396275 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerName="dnsmasq-dns" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.396450 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" containerName="dnsmasq-dns" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.397163 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.402316 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.402500 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.402886 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.412558 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-lock\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.412635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz5kk\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-kube-api-access-lz5kk\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.412910 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-cache\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.412953 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.412974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.413022 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvvqb\" (UniqueName: \"kubernetes.io/projected/8cfe7179-2161-4aac-b4c4-cdeef9baed34-kube-api-access-pvvqb\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.413034 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.413143 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.413158 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.413223 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift podName:f090a614-3703-461c-8152-226a5b53c16a nodeName:}" failed. No retries permitted until 2025-12-03 16:51:07.913198215 +0000 UTC m=+1191.327020103 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift") pod "swift-storage-0" (UID: "f090a614-3703-461c-8152-226a5b53c16a") : configmap "swift-ring-files" not found
Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.413305 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-cache\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0"
Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.413486 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0"
Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.413918 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-lock\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0"
Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.418638 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-zcdkn"]
Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.425947 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66c567d66c-5g2mp"]
Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.448668 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz5kk\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-kube-api-access-lz5kk\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0"
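
The etc-swift failure above is an ordering issue rather than a fault: swift-storage-0 mounts a projected volume built from configmap swift-ring-files, which the swift-ring-rebalance-zcdkn job has not yet published, so SetUp fails and is re-queued. The delay doubles between attempts: this failure is re-tried after 500ms, and the repeat further down waits 1s, consistent with exponential backoff. A stdlib-Go sketch of that retry shape; the constants are read off this log, not taken from kubelet source:

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // mountWithBackoff retries a failing mount with a doubling delay,
    // capped at max, mirroring the 500ms -> 1s cadence in the log.
    func mountWithBackoff(mount func() error, initial, max time.Duration) error {
        delay := initial
        for {
            err := mount()
            if err == nil {
                return nil
            }
            fmt.Printf("mount failed (%v); no retries permitted for %v\n", err, delay)
            time.Sleep(delay)
            if delay < max {
                delay *= 2
            }
        }
    }

    func main() {
        attempts := 0
        _ = mountWithBackoff(func() error {
            attempts++
            if attempts < 3 {
                return errors.New(`configmap "swift-ring-files" not found`)
            }
            return nil // ring files published; SetUp succeeds
        }, 500*time.Millisecond, 2*time.Minute)
    }
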
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.481517 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.518821 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtmsg\" (UniqueName: \"kubernetes.io/projected/2bd61525-e5d2-4258-9ebb-1d0786953372-kube-api-access-vtmsg\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519226 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-swiftconf\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519295 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-combined-ca-bundle\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2bd61525-e5d2-4258-9ebb-1d0786953372-etc-swift\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519393 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-dispersionconf\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519420 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-ring-data-devices\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519449 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-scripts\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.519520 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cfe7179-2161-4aac-b4c4-cdeef9baed34-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621413 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vtmsg\" (UniqueName: \"kubernetes.io/projected/2bd61525-e5d2-4258-9ebb-1d0786953372-kube-api-access-vtmsg\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621489 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-swiftconf\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621548 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-combined-ca-bundle\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2bd61525-e5d2-4258-9ebb-1d0786953372-etc-swift\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621626 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-dispersionconf\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621651 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-ring-data-devices\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.621679 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-scripts\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.622404 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2bd61525-e5d2-4258-9ebb-1d0786953372-etc-swift\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.622906 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-ring-data-devices\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.623325 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-scripts\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.626141 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-dispersionconf\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.626455 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-combined-ca-bundle\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.627269 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-swiftconf\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.639606 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtmsg\" (UniqueName: \"kubernetes.io/projected/2bd61525-e5d2-4258-9ebb-1d0786953372-kube-api-access-vtmsg\") pod \"swift-ring-rebalance-zcdkn\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.651910 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:07 crc kubenswrapper[5002]: I1203 16:51:07.933648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.935020 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.935043 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:51:07 crc kubenswrapper[5002]: E1203 16:51:07.935092 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift podName:f090a614-3703-461c-8152-226a5b53c16a nodeName:}" failed. No retries permitted until 2025-12-03 16:51:08.935073242 +0000 UTC m=+1192.348895130 (durationBeforeRetry 1s). 
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.009583 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2f69c54d-bd52-413b-86b6-6b5c4ca765ba","Type":"ContainerStarted","Data":"fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.012173 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"60743dc4-9a30-4fd2-80c1-0c7427241e92","Type":"ContainerStarted","Data":"76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.030405 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerStarted","Data":"e32836bab6a307b307c2e7ff2105f751a937d80481eef6d5b7b82ddfe55feee5"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.033486 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf"
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.033518 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc4b48fc9-dn4zf" event={"ID":"8cfe7179-2161-4aac-b4c4-cdeef9baed34","Type":"ContainerDied","Data":"153cc734b75a3805e74cb826c9c8a75f4750474160a209c158326b6949e9b43b"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.035421 5002 scope.go:117] "RemoveContainer" containerID="a2fc0440e9074fc53c20561442d14c83c96843538d576159475c9558f6c647cb"
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.042582 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"84ace4a6-f88f-4b88-8bd9-62440a00df18","Type":"ContainerStarted","Data":"9c56e5ddb6de72464477015888e8cbde28a224dd4bd4a8110ce4845a4f8af4d1"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.042614 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=15.714850513 podStartE2EDuration="38.042597496s" podCreationTimestamp="2025-12-03 16:50:30 +0000 UTC" firstStartedPulling="2025-12-03 16:50:32.582336256 +0000 UTC m=+1155.996158144" lastFinishedPulling="2025-12-03 16:50:54.910083239 +0000 UTC m=+1178.323905127" observedRunningTime="2025-12-03 16:51:08.038057723 +0000 UTC m=+1191.451879611" watchObservedRunningTime="2025-12-03 16:51:08.042597496 +0000 UTC m=+1191.456419384"
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.046975 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.050916 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk" event={"ID":"2e876c11-14f1-4e51-90a1-e2cdddc08c87","Type":"ContainerStarted","Data":"aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.051756 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-hnkdk"
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.053438 5002 generic.go:334] "Generic (PLEG): container finished" podID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerID="d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca" exitCode=0
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.053484 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" event={"ID":"63caefa0-1cea-4ecc-b8e4-6d579f56413c","Type":"ContainerDied","Data":"d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.053501 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" event={"ID":"63caefa0-1cea-4ecc-b8e4-6d579f56413c","Type":"ContainerStarted","Data":"ead2c500afe4972f20f1cad8b12e6877ea97b38de71bd612027bb565e8a74365"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.060644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b132eed4-cb4d-4abc-b49a-55688686288d","Type":"ContainerStarted","Data":"79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.064502 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9eacefa0-a1f4-4181-ab8e-201efd0fc67e","Type":"ContainerStarted","Data":"104487232d5020db87327b0c923d164a5647072cdec65c4bd2d6deffa890a181"}
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.120727 5002 scope.go:117] "RemoveContainer" containerID="1f6316af561546b5f9f106108beeac82fb9451cdb2bf90d4db146bd24fff176a"
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.127327 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-dn4zf"]
Dec 03 16:51:08 crc kubenswrapper[5002]: W1203 16:51:08.130084 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bd61525_e5d2_4258_9ebb_1d0786953372.slice/crio-fb03e9f16b95f1de5380a8ca4412effc81e135dc5e61023b88f5cf5b16429a70 WatchSource:0}: Error finding container fb03e9f16b95f1de5380a8ca4412effc81e135dc5e61023b88f5cf5b16429a70: Status 404 returned error can't find the container with id fb03e9f16b95f1de5380a8ca4412effc81e135dc5e61023b88f5cf5b16429a70
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.165618 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bc4b48fc9-dn4zf"]
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.178522 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-zcdkn"]
Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.179829 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=15.462636663 podStartE2EDuration="36.179796528s" podCreationTimestamp="2025-12-03 16:50:32 +0000 UTC" firstStartedPulling="2025-12-03 16:50:34.192885023 +0000 UTC m=+1157.606706911" lastFinishedPulling="2025-12-03 16:50:54.910044888 +0000 UTC m=+1178.323866776" observedRunningTime="2025-12-03 16:51:08.121788981 +0000 UTC m=+1191.535610889" watchObservedRunningTime="2025-12-03 16:51:08.179796528 +0000 UTC m=+1191.593618416"
+0000 UTC m=+1178.825027119" lastFinishedPulling="2025-12-03 16:51:06.943770546 +0000 UTC m=+1190.357592434" observedRunningTime="2025-12-03 16:51:08.142452243 +0000 UTC m=+1191.556274131" watchObservedRunningTime="2025-12-03 16:51:08.18792747 +0000 UTC m=+1191.601749358" Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.203512 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-hnkdk" podStartSLOduration=20.908756502 podStartE2EDuration="30.203489093s" podCreationTimestamp="2025-12-03 16:50:38 +0000 UTC" firstStartedPulling="2025-12-03 16:50:55.533295652 +0000 UTC m=+1178.947117540" lastFinishedPulling="2025-12-03 16:51:04.828028243 +0000 UTC m=+1188.241850131" observedRunningTime="2025-12-03 16:51:08.188185746 +0000 UTC m=+1191.602007634" watchObservedRunningTime="2025-12-03 16:51:08.203489093 +0000 UTC m=+1191.617310981" Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.857621 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cfe7179-2161-4aac-b4c4-cdeef9baed34" path="/var/lib/kubelet/pods/8cfe7179-2161-4aac-b4c4-cdeef9baed34/volumes" Dec 03 16:51:08 crc kubenswrapper[5002]: I1203 16:51:08.965109 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:08 crc kubenswrapper[5002]: E1203 16:51:08.965371 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:51:08 crc kubenswrapper[5002]: E1203 16:51:08.965411 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:51:08 crc kubenswrapper[5002]: E1203 16:51:08.965509 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift podName:f090a614-3703-461c-8152-226a5b53c16a nodeName:}" failed. No retries permitted until 2025-12-03 16:51:10.965477592 +0000 UTC m=+1194.379299480 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift") pod "swift-storage-0" (UID: "f090a614-3703-461c-8152-226a5b53c16a") : configmap "swift-ring-files" not found Dec 03 16:51:09 crc kubenswrapper[5002]: I1203 16:51:09.083602 5002 generic.go:334] "Generic (PLEG): container finished" podID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerID="e32836bab6a307b307c2e7ff2105f751a937d80481eef6d5b7b82ddfe55feee5" exitCode=0 Dec 03 16:51:09 crc kubenswrapper[5002]: I1203 16:51:09.083693 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerDied","Data":"e32836bab6a307b307c2e7ff2105f751a937d80481eef6d5b7b82ddfe55feee5"} Dec 03 16:51:09 crc kubenswrapper[5002]: I1203 16:51:09.086307 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zcdkn" event={"ID":"2bd61525-e5d2-4258-9ebb-1d0786953372","Type":"ContainerStarted","Data":"fb03e9f16b95f1de5380a8ca4412effc81e135dc5e61023b88f5cf5b16429a70"} Dec 03 16:51:09 crc kubenswrapper[5002]: I1203 16:51:09.093292 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" event={"ID":"63caefa0-1cea-4ecc-b8e4-6d579f56413c","Type":"ContainerStarted","Data":"569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3"} Dec 03 16:51:09 crc kubenswrapper[5002]: I1203 16:51:09.093388 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:09 crc kubenswrapper[5002]: I1203 16:51:09.129301 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" podStartSLOduration=4.129277457 podStartE2EDuration="4.129277457s" podCreationTimestamp="2025-12-03 16:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:09.124300821 +0000 UTC m=+1192.538122709" watchObservedRunningTime="2025-12-03 16:51:09.129277457 +0000 UTC m=+1192.543099345" Dec 03 16:51:10 crc kubenswrapper[5002]: I1203 16:51:10.026909 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" Dec 03 16:51:10 crc kubenswrapper[5002]: I1203 16:51:10.119365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerStarted","Data":"698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e"} Dec 03 16:51:11 crc kubenswrapper[5002]: I1203 16:51:11.026648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:11 crc kubenswrapper[5002]: E1203 16:51:11.026871 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:51:11 crc kubenswrapper[5002]: E1203 16:51:11.026893 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:51:11 crc kubenswrapper[5002]: E1203 16:51:11.031268 5002 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift podName:f090a614-3703-461c-8152-226a5b53c16a nodeName:}" failed. No retries permitted until 2025-12-03 16:51:15.026942168 +0000 UTC m=+1198.440764056 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift") pod "swift-storage-0" (UID: "f090a614-3703-461c-8152-226a5b53c16a") : configmap "swift-ring-files" not found Dec 03 16:51:12 crc kubenswrapper[5002]: I1203 16:51:12.308889 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 03 16:51:12 crc kubenswrapper[5002]: I1203 16:51:12.309497 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 03 16:51:13 crc kubenswrapper[5002]: I1203 16:51:13.694335 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 16:51:13 crc kubenswrapper[5002]: I1203 16:51:13.695029 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 16:51:13 crc kubenswrapper[5002]: I1203 16:51:13.818710 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.167539 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2f69c54d-bd52-413b-86b6-6b5c4ca765ba","Type":"ContainerStarted","Data":"9fb4d5ac070c404dafdfc058deb933ce0850d4c297f125e77eac745370a46ed9"} Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.170682 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zcdkn" event={"ID":"2bd61525-e5d2-4258-9ebb-1d0786953372","Type":"ContainerStarted","Data":"90aabd49e1b960e251f35578f1d41446c037e761d8d73e0a913a8379ef640fd1"} Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.174644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9eacefa0-a1f4-4181-ab8e-201efd0fc67e","Type":"ContainerStarted","Data":"19fc75cf1b1a687269a530436d2398a6475ba55eef4ea65b1527d796cfd116c4"} Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.184055 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerStarted","Data":"226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff"} Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.184342 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.184380 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.200604 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=16.719398442 podStartE2EDuration="36.200577709s" podCreationTimestamp="2025-12-03 16:50:38 +0000 UTC" firstStartedPulling="2025-12-03 16:50:53.800842865 +0000 UTC m=+1177.214664793" lastFinishedPulling="2025-12-03 16:51:13.282022132 +0000 UTC m=+1196.695844060" observedRunningTime="2025-12-03 16:51:14.194149654 +0000 UTC m=+1197.607971562" watchObservedRunningTime="2025-12-03 16:51:14.200577709 +0000 
UTC m=+1197.614399597" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.263365 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-zcdkn" podStartSLOduration=2.13160839 podStartE2EDuration="7.263348996s" podCreationTimestamp="2025-12-03 16:51:07 +0000 UTC" firstStartedPulling="2025-12-03 16:51:08.149486694 +0000 UTC m=+1191.563308582" lastFinishedPulling="2025-12-03 16:51:13.28122726 +0000 UTC m=+1196.695049188" observedRunningTime="2025-12-03 16:51:14.225587269 +0000 UTC m=+1197.639409157" watchObservedRunningTime="2025-12-03 16:51:14.263348996 +0000 UTC m=+1197.677170884" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.264398 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=15.869393297 podStartE2EDuration="33.264393885s" podCreationTimestamp="2025-12-03 16:50:41 +0000 UTC" firstStartedPulling="2025-12-03 16:50:55.856403071 +0000 UTC m=+1179.270224959" lastFinishedPulling="2025-12-03 16:51:13.251403659 +0000 UTC m=+1196.665225547" observedRunningTime="2025-12-03 16:51:14.256875621 +0000 UTC m=+1197.670697559" watchObservedRunningTime="2025-12-03 16:51:14.264393885 +0000 UTC m=+1197.678215763" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.293029 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-jkwrt" podStartSLOduration=26.917418674 podStartE2EDuration="36.292996233s" podCreationTimestamp="2025-12-03 16:50:38 +0000 UTC" firstStartedPulling="2025-12-03 16:50:55.606907525 +0000 UTC m=+1179.020729413" lastFinishedPulling="2025-12-03 16:51:04.982485084 +0000 UTC m=+1188.396306972" observedRunningTime="2025-12-03 16:51:14.288543511 +0000 UTC m=+1197.702365399" watchObservedRunningTime="2025-12-03 16:51:14.292996233 +0000 UTC m=+1197.706818121" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.328092 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.526961 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.645177 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 03 16:51:14 crc kubenswrapper[5002]: I1203 16:51:14.975051 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 16:51:15 crc kubenswrapper[5002]: I1203 16:51:15.116504 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:15 crc kubenswrapper[5002]: E1203 16:51:15.116860 5002 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:51:15 crc kubenswrapper[5002]: E1203 16:51:15.117072 5002 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:51:15 crc kubenswrapper[5002]: E1203 16:51:15.117181 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift 
Dec 03 16:51:15 crc kubenswrapper[5002]: E1203 16:51:15.117181 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift podName:f090a614-3703-461c-8152-226a5b53c16a nodeName:}" failed. No retries permitted until 2025-12-03 16:51:23.117144352 +0000 UTC m=+1206.530966250 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift") pod "swift-storage-0" (UID: "f090a614-3703-461c-8152-226a5b53c16a") : configmap "swift-ring-files" not found
Dec 03 16:51:15 crc kubenswrapper[5002]: I1203 16:51:15.804312 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 03 16:51:15 crc kubenswrapper[5002]: I1203 16:51:15.975650 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.020056 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.141282 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.210116 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.269343 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.418955 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.493257 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-qmpf6"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.493586 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerName="dnsmasq-dns" containerID="cri-o://97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da" gracePeriod=10
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.614831 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-565f79875c-dllhh"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.616565 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.619280 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.625702 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-565f79875c-dllhh"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.738188 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-qnvnc"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.746531 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.748206 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qnvnc"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.755798 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.762014 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-config\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.762095 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4crdp\" (UniqueName: \"kubernetes.io/projected/7e5774a8-cad5-4d91-9507-2f1a710aad55-kube-api-access-4crdp\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.762120 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-ovsdbserver-nb\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.762182 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-dns-svc\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.863817 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56vvz\" (UniqueName: \"kubernetes.io/projected/9f59bead-66d7-4fcb-842f-e573fcadcf1f-kube-api-access-56vvz\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864404 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-config\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864442 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4crdp\" (UniqueName: \"kubernetes.io/projected/7e5774a8-cad5-4d91-9507-2f1a710aad55-kube-api-access-4crdp\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864463 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-ovsdbserver-nb\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864513 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-combined-ca-bundle\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864531 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-dns-svc\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864558 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovn-rundir\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864593 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f59bead-66d7-4fcb-842f-e573fcadcf1f-config\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.864618 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovs-rundir\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.865585 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-config\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.866566 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-dns-svc\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.870392 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.878897 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-ovsdbserver-nb\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.890849 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4crdp\" (UniqueName: \"kubernetes.io/projected/7e5774a8-cad5-4d91-9507-2f1a710aad55-kube-api-access-4crdp\") pod \"dnsmasq-dns-565f79875c-dllhh\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.938956 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-565f79875c-dllhh"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.952613 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565f79875c-dllhh"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.967381 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.967453 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-combined-ca-bundle\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.967476 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovn-rundir\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.967513 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f59bead-66d7-4fcb-842f-e573fcadcf1f-config\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.967542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovs-rundir\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.967579 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56vvz\" (UniqueName: \"kubernetes.io/projected/9f59bead-66d7-4fcb-842f-e573fcadcf1f-kube-api-access-56vvz\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.968364 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovs-rundir\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.968441 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovn-rundir\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.971612 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.975188 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"]
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.976938 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.979232 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f59bead-66d7-4fcb-842f-e573fcadcf1f-config\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.980518 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:16 crc kubenswrapper[5002]: I1203 16:51:16.980933 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:16.998643 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56vvz\" (UniqueName: \"kubernetes.io/projected/9f59bead-66d7-4fcb-842f-e573fcadcf1f-kube-api-access-56vvz\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:16.998992 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-combined-ca-bundle\") pod \"ovn-controller-metrics-qnvnc\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.005098 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"]
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.070989 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-dns-svc\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.071401 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmsg2\" (UniqueName: \"kubernetes.io/projected/60bfa9cd-a2dd-4460-94b8-495a002dbf89-kube-api-access-dmsg2\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.071467 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-config\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.071504 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.071532 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.071826 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qnvnc"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.122938 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-qmpf6"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.173566 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-dns-svc\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.173619 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmsg2\" (UniqueName: \"kubernetes.io/projected/60bfa9cd-a2dd-4460-94b8-495a002dbf89-kube-api-access-dmsg2\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.173684 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-config\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.173716 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.173760 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.175212 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-dns-svc\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.181507 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.183097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-config\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.183576 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.199826 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmsg2\" (UniqueName: \"kubernetes.io/projected/60bfa9cd-a2dd-4460-94b8-495a002dbf89-kube-api-access-dmsg2\") pod \"dnsmasq-dns-59d5fbdd8c-6nt4j\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.223491 5002 generic.go:334] "Generic (PLEG): container finished" podID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerID="97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da" exitCode=0
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.224847 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb666b895-qmpf6"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.224973 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" event={"ID":"f0c2ebd9-6979-42b4-9537-267e2a36a8c7","Type":"ContainerDied","Data":"97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da"}
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.225063 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.225081 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb666b895-qmpf6" event={"ID":"f0c2ebd9-6979-42b4-9537-267e2a36a8c7","Type":"ContainerDied","Data":"9575cd8bf32c7d556d1a35f1d170584748941163b1287420cb7a067c5b384c78"}
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.225126 5002 scope.go:117] "RemoveContainer" containerID="97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.253614 5002 scope.go:117] "RemoveContainer" containerID="ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.275008 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-config\") pod \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") "
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.275080 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-dns-svc\") pod \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") "
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.275314 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8n5f\" (UniqueName: \"kubernetes.io/projected/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-kube-api-access-q8n5f\") pod \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\" (UID: \"f0c2ebd9-6979-42b4-9537-267e2a36a8c7\") "
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.276387 5002 scope.go:117] "RemoveContainer" containerID="97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da"
Dec 03 16:51:17 crc kubenswrapper[5002]: E1203 16:51:17.278427 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da\": container with ID starting with 97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da not found: ID does not exist" containerID="97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.278573 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da"} err="failed to get container status \"97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da\": rpc error: code = NotFound desc = could not find container \"97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da\": container with ID starting with 97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da not found: ID does not exist"
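The NotFound errors above are logged and then ignored: the container is already gone, so the delete is treated as done. A minimal Go sketch of that idempotent-removal pattern; errNotFound below is a stand-in sentinel for the CRI's gRPC NotFound status, not the kubelet's actual error type:

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the CRI runtime's NotFound status seen above.
var errNotFound = errors.New("NotFound: container does not exist")

// removeContainer treats "already gone" as success, which is why the
// kubelet logs the NotFound result above and moves on instead of retrying.
func removeContainer(id string, remove func(string) error) error {
	err := remove(id)
	if errors.Is(err, errNotFound) {
		fmt.Printf("container %s already removed, nothing to do\n", id)
		return nil
	}
	return err
}

func main() {
	alreadyGone := func(string) error { return errNotFound }
	_ = removeContainer("97eea90d2f6930d22d358985a7a2f465b3415b5ba91b3fdcd6c82199b275f3da", alreadyGone)
}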
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.278695 5002 scope.go:117] "RemoveContainer" containerID="ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3"
Dec 03 16:51:17 crc kubenswrapper[5002]: E1203 16:51:17.279293 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3\": container with ID starting with ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3 not found: ID does not exist" containerID="ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.279368 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3"} err="failed to get container status \"ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3\": rpc error: code = NotFound desc = could not find container \"ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3\": container with ID starting with ad00b6a4b0a33208cd7d6a05afa37a06cbbfe8602a860af70dfdb6abee90acd3 not found: ID does not exist"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.280785 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.281166 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-kube-api-access-q8n5f" (OuterVolumeSpecName: "kube-api-access-q8n5f") pod "f0c2ebd9-6979-42b4-9537-267e2a36a8c7" (UID: "f0c2ebd9-6979-42b4-9537-267e2a36a8c7"). InnerVolumeSpecName "kube-api-access-q8n5f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.328966 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-config" (OuterVolumeSpecName: "config") pod "f0c2ebd9-6979-42b4-9537-267e2a36a8c7" (UID: "f0c2ebd9-6979-42b4-9537-267e2a36a8c7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.346990 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f0c2ebd9-6979-42b4-9537-267e2a36a8c7" (UID: "f0c2ebd9-6979-42b4-9537-267e2a36a8c7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.379454 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.379531 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.379542 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8n5f\" (UniqueName: \"kubernetes.io/projected/f0c2ebd9-6979-42b4-9537-267e2a36a8c7-kube-api-access-q8n5f\") on node \"crc\" DevicePath \"\""
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.414272 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.534838 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-565f79875c-dllhh"]
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.550283 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 03 16:51:17 crc kubenswrapper[5002]: E1203 16:51:17.550821 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerName="init"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.550849 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerName="init"
Dec 03 16:51:17 crc kubenswrapper[5002]: E1203 16:51:17.550877 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerName="dnsmasq-dns"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.550883 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerName="dnsmasq-dns"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.553641 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" containerName="dnsmasq-dns"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.554835 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.561527 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.561848 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.562005 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.568247 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.574388 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-sfgzn"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.651813 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-qmpf6"]
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.672697 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cb666b895-qmpf6"]
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.684384 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qnvnc"]
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.698977 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.699155 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-config\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.699263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.699457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7dfn\" (UniqueName: \"kubernetes.io/projected/29a68818-9346-4437-9527-aea9383c1a25-kube-api-access-n7dfn\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.699487 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-scripts\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.699580 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.699636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29a68818-9346-4437-9527-aea9383c1a25-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.802241 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-config\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.802775 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.803662 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-config\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.803974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7dfn\" (UniqueName: \"kubernetes.io/projected/29a68818-9346-4437-9527-aea9383c1a25-kube-api-access-n7dfn\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.804030 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-scripts\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.804063 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.804120 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29a68818-9346-4437-9527-aea9383c1a25-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.804151 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.804670 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29a68818-9346-4437-9527-aea9383c1a25-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.805149 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-scripts\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.808978 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.811233 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.812478 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.827575 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7dfn\" (UniqueName: \"kubernetes.io/projected/29a68818-9346-4437-9527-aea9383c1a25-kube-api-access-n7dfn\") pod \"ovn-northd-0\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " pod="openstack/ovn-northd-0"
Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.894966 5002 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 16:51:17 crc kubenswrapper[5002]: I1203 16:51:17.994870 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"] Dec 03 16:51:18 crc kubenswrapper[5002]: W1203 16:51:17.999739 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60bfa9cd_a2dd_4460_94b8_495a002dbf89.slice/crio-d1923fdf0ab332898148191aaba3ecd4ecc2c6310ef8790cb287181efeac37be WatchSource:0}: Error finding container d1923fdf0ab332898148191aaba3ecd4ecc2c6310ef8790cb287181efeac37be: Status 404 returned error can't find the container with id d1923fdf0ab332898148191aaba3ecd4ecc2c6310ef8790cb287181efeac37be Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.238478 5002 generic.go:334] "Generic (PLEG): container finished" podID="7e5774a8-cad5-4d91-9507-2f1a710aad55" containerID="b6be495eeb5802ffcd0489556fee146a935d1cc20a91ea3181455c9af9d8dd68" exitCode=0 Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.238574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565f79875c-dllhh" event={"ID":"7e5774a8-cad5-4d91-9507-2f1a710aad55","Type":"ContainerDied","Data":"b6be495eeb5802ffcd0489556fee146a935d1cc20a91ea3181455c9af9d8dd68"} Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.239055 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565f79875c-dllhh" event={"ID":"7e5774a8-cad5-4d91-9507-2f1a710aad55","Type":"ContainerStarted","Data":"aa8c8fee2355993b09e61725dd904886458f56ff8164bc30895e1c271535da1a"} Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.250062 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" event={"ID":"60bfa9cd-a2dd-4460-94b8-495a002dbf89","Type":"ContainerStarted","Data":"02e69d3601c9b6e85702d77583a2b6d167247409b34fa1a2db136d575b2e3286"} Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.250149 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" event={"ID":"60bfa9cd-a2dd-4460-94b8-495a002dbf89","Type":"ContainerStarted","Data":"d1923fdf0ab332898148191aaba3ecd4ecc2c6310ef8790cb287181efeac37be"} Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.252541 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qnvnc" event={"ID":"9f59bead-66d7-4fcb-842f-e573fcadcf1f","Type":"ContainerStarted","Data":"2f14606baed65302b953c10a81a0db167dba604320e45146e5757544b0bb9b0c"} Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.252649 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qnvnc" event={"ID":"9f59bead-66d7-4fcb-842f-e573fcadcf1f","Type":"ContainerStarted","Data":"13d013a19c6910bc14e78d414164bc6ef0bce38644604d22080f59e3d5cc9aa5"} Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.314776 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-qnvnc" podStartSLOduration=2.314733894 podStartE2EDuration="2.314733894s" podCreationTimestamp="2025-12-03 16:51:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:18.299590422 +0000 UTC m=+1201.713412310" watchObservedRunningTime="2025-12-03 16:51:18.314733894 +0000 UTC m=+1201.728555782" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.427988 5002 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.528058 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565f79875c-dllhh" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.629421 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-dns-svc\") pod \"7e5774a8-cad5-4d91-9507-2f1a710aad55\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.630980 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-config\") pod \"7e5774a8-cad5-4d91-9507-2f1a710aad55\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.631069 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4crdp\" (UniqueName: \"kubernetes.io/projected/7e5774a8-cad5-4d91-9507-2f1a710aad55-kube-api-access-4crdp\") pod \"7e5774a8-cad5-4d91-9507-2f1a710aad55\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.631154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-ovsdbserver-nb\") pod \"7e5774a8-cad5-4d91-9507-2f1a710aad55\" (UID: \"7e5774a8-cad5-4d91-9507-2f1a710aad55\") " Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.636895 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e5774a8-cad5-4d91-9507-2f1a710aad55-kube-api-access-4crdp" (OuterVolumeSpecName: "kube-api-access-4crdp") pod "7e5774a8-cad5-4d91-9507-2f1a710aad55" (UID: "7e5774a8-cad5-4d91-9507-2f1a710aad55"). InnerVolumeSpecName "kube-api-access-4crdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.652882 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-config" (OuterVolumeSpecName: "config") pod "7e5774a8-cad5-4d91-9507-2f1a710aad55" (UID: "7e5774a8-cad5-4d91-9507-2f1a710aad55"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.656420 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7e5774a8-cad5-4d91-9507-2f1a710aad55" (UID: "7e5774a8-cad5-4d91-9507-2f1a710aad55"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.673871 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7e5774a8-cad5-4d91-9507-2f1a710aad55" (UID: "7e5774a8-cad5-4d91-9507-2f1a710aad55"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.734475 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.735171 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.735186 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4crdp\" (UniqueName: \"kubernetes.io/projected/7e5774a8-cad5-4d91-9507-2f1a710aad55-kube-api-access-4crdp\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.735204 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e5774a8-cad5-4d91-9507-2f1a710aad55-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:18 crc kubenswrapper[5002]: I1203 16:51:18.870193 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0c2ebd9-6979-42b4-9537-267e2a36a8c7" path="/var/lib/kubelet/pods/f0c2ebd9-6979-42b4-9537-267e2a36a8c7/volumes" Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.284476 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565f79875c-dllhh" Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.284466 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565f79875c-dllhh" event={"ID":"7e5774a8-cad5-4d91-9507-2f1a710aad55","Type":"ContainerDied","Data":"aa8c8fee2355993b09e61725dd904886458f56ff8164bc30895e1c271535da1a"} Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.284588 5002 scope.go:117] "RemoveContainer" containerID="b6be495eeb5802ffcd0489556fee146a935d1cc20a91ea3181455c9af9d8dd68" Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.312259 5002 generic.go:334] "Generic (PLEG): container finished" podID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerID="02e69d3601c9b6e85702d77583a2b6d167247409b34fa1a2db136d575b2e3286" exitCode=0 Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.312651 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" event={"ID":"60bfa9cd-a2dd-4460-94b8-495a002dbf89","Type":"ContainerDied","Data":"02e69d3601c9b6e85702d77583a2b6d167247409b34fa1a2db136d575b2e3286"} Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.312714 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" event={"ID":"60bfa9cd-a2dd-4460-94b8-495a002dbf89","Type":"ContainerStarted","Data":"fe2374b4357280cf4c7bbab3ad1bad480db444706c268289717988a39d61762d"} Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.318467 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.330937 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29a68818-9346-4437-9527-aea9383c1a25","Type":"ContainerStarted","Data":"0f495ec01230cedb5d9aa47c7518f3e4e3ccb1c272ff505b0a451117c9eff00c"} Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.370864 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-565f79875c-dllhh"] Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.377625 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-565f79875c-dllhh"] Dec 03 16:51:19 crc kubenswrapper[5002]: I1203 16:51:19.384272 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" podStartSLOduration=3.384245998 podStartE2EDuration="3.384245998s" podCreationTimestamp="2025-12-03 16:51:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:19.365914309 +0000 UTC m=+1202.779736197" watchObservedRunningTime="2025-12-03 16:51:19.384245998 +0000 UTC m=+1202.798067886" Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.345195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29a68818-9346-4437-9527-aea9383c1a25","Type":"ContainerStarted","Data":"d01d9155c2a1be73fba2d20cfcf3edb5d0d73c8658491e3d1015b359c087e0ad"} Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.345896 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29a68818-9346-4437-9527-aea9383c1a25","Type":"ContainerStarted","Data":"8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39"} Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.346532 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.376425 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.092633174 podStartE2EDuration="3.376362535s" podCreationTimestamp="2025-12-03 16:51:17 +0000 UTC" firstStartedPulling="2025-12-03 16:51:18.444636688 +0000 UTC m=+1201.858458576" lastFinishedPulling="2025-12-03 16:51:19.728366049 +0000 UTC m=+1203.142187937" observedRunningTime="2025-12-03 16:51:20.366343793 +0000 UTC m=+1203.780165721" watchObservedRunningTime="2025-12-03 16:51:20.376362535 +0000 UTC m=+1203.790184423" Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.856856 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e5774a8-cad5-4d91-9507-2f1a710aad55" path="/var/lib/kubelet/pods/7e5774a8-cad5-4d91-9507-2f1a710aad55/volumes" Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.921089 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:51:20 crc kubenswrapper[5002]: I1203 16:51:20.921187 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:51:21 crc kubenswrapper[5002]: I1203 16:51:21.378024 5002 generic.go:334] "Generic (PLEG): container finished" podID="2bd61525-e5d2-4258-9ebb-1d0786953372" containerID="90aabd49e1b960e251f35578f1d41446c037e761d8d73e0a913a8379ef640fd1" exitCode=0 Dec 03 16:51:21 crc kubenswrapper[5002]: I1203 16:51:21.378151 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-ring-rebalance-zcdkn" event={"ID":"2bd61525-e5d2-4258-9ebb-1d0786953372","Type":"ContainerDied","Data":"90aabd49e1b960e251f35578f1d41446c037e761d8d73e0a913a8379ef640fd1"} Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.838436 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.843173 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-dispersionconf\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.843226 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-ring-data-devices\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.843262 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-combined-ca-bundle\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.843345 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-scripts\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.843389 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtmsg\" (UniqueName: \"kubernetes.io/projected/2bd61525-e5d2-4258-9ebb-1d0786953372-kube-api-access-vtmsg\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.844340 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.850650 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bd61525-e5d2-4258-9ebb-1d0786953372-kube-api-access-vtmsg" (OuterVolumeSpecName: "kube-api-access-vtmsg") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "kube-api-access-vtmsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.867081 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.890734 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-scripts" (OuterVolumeSpecName: "scripts") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.902690 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945434 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-swiftconf\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945511 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2bd61525-e5d2-4258-9ebb-1d0786953372-etc-swift\") pod \"2bd61525-e5d2-4258-9ebb-1d0786953372\" (UID: \"2bd61525-e5d2-4258-9ebb-1d0786953372\") " Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945798 5002 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945813 5002 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945822 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945832 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2bd61525-e5d2-4258-9ebb-1d0786953372-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.945842 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtmsg\" (UniqueName: \"kubernetes.io/projected/2bd61525-e5d2-4258-9ebb-1d0786953372-kube-api-access-vtmsg\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.947155 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bd61525-e5d2-4258-9ebb-1d0786953372-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:51:22 crc kubenswrapper[5002]: I1203 16:51:22.969310 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2bd61525-e5d2-4258-9ebb-1d0786953372" (UID: "2bd61525-e5d2-4258-9ebb-1d0786953372"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.047245 5002 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2bd61525-e5d2-4258-9ebb-1d0786953372-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.047282 5002 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2bd61525-e5d2-4258-9ebb-1d0786953372-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.149670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.159716 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"swift-storage-0\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " pod="openstack/swift-storage-0" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.409474 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zcdkn" event={"ID":"2bd61525-e5d2-4258-9ebb-1d0786953372","Type":"ContainerDied","Data":"fb03e9f16b95f1de5380a8ca4412effc81e135dc5e61023b88f5cf5b16429a70"} Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.409533 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb03e9f16b95f1de5380a8ca4412effc81e135dc5e61023b88f5cf5b16429a70" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.409667 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zcdkn" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.448775 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.703933 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-gf624"] Dec 03 16:51:23 crc kubenswrapper[5002]: E1203 16:51:23.704857 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e5774a8-cad5-4d91-9507-2f1a710aad55" containerName="init" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.704891 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e5774a8-cad5-4d91-9507-2f1a710aad55" containerName="init" Dec 03 16:51:23 crc kubenswrapper[5002]: E1203 16:51:23.704930 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bd61525-e5d2-4258-9ebb-1d0786953372" containerName="swift-ring-rebalance" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.704938 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bd61525-e5d2-4258-9ebb-1d0786953372" containerName="swift-ring-rebalance" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.705119 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e5774a8-cad5-4d91-9507-2f1a710aad55" containerName="init" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.705147 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bd61525-e5d2-4258-9ebb-1d0786953372" containerName="swift-ring-rebalance" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.705857 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gf624" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.720381 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-864c-account-create-update-f9jhh"] Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.722470 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.726294 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.733381 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gf624"] Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.742215 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-864c-account-create-update-f9jhh"] Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.868230 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vkct\" (UniqueName: \"kubernetes.io/projected/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-kube-api-access-4vkct\") pod \"keystone-864c-account-create-update-f9jhh\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") " pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.868392 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzsks\" (UniqueName: \"kubernetes.io/projected/35fdb887-bef5-460d-af35-20096f2b736c-kube-api-access-mzsks\") pod \"keystone-db-create-gf624\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") " pod="openstack/keystone-db-create-gf624" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.868445 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-operator-scripts\") pod \"keystone-864c-account-create-update-f9jhh\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") " pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.868546 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fdb887-bef5-460d-af35-20096f2b736c-operator-scripts\") pod \"keystone-db-create-gf624\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") " pod="openstack/keystone-db-create-gf624" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.932527 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-bj88z"] Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.934415 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-bj88z" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.940219 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bj88z"] Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.970674 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fdb887-bef5-460d-af35-20096f2b736c-operator-scripts\") pod \"keystone-db-create-gf624\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") " pod="openstack/keystone-db-create-gf624" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.970793 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-operator-scripts\") pod \"placement-db-create-bj88z\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") " pod="openstack/placement-db-create-bj88z" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.971283 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ttj7\" (UniqueName: \"kubernetes.io/projected/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-kube-api-access-2ttj7\") pod \"placement-db-create-bj88z\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") " pod="openstack/placement-db-create-bj88z" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.971523 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vkct\" (UniqueName: \"kubernetes.io/projected/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-kube-api-access-4vkct\") pod \"keystone-864c-account-create-update-f9jhh\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") " pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.971604 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzsks\" (UniqueName: \"kubernetes.io/projected/35fdb887-bef5-460d-af35-20096f2b736c-kube-api-access-mzsks\") pod \"keystone-db-create-gf624\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") " pod="openstack/keystone-db-create-gf624" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.971635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-operator-scripts\") pod \"keystone-864c-account-create-update-f9jhh\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") " pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.971881 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fdb887-bef5-460d-af35-20096f2b736c-operator-scripts\") pod \"keystone-db-create-gf624\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") " pod="openstack/keystone-db-create-gf624" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.972923 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-operator-scripts\") pod \"keystone-864c-account-create-update-f9jhh\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") " pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.992184 5002 
Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.992184 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vkct\" (UniqueName: \"kubernetes.io/projected/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-kube-api-access-4vkct\") pod \"keystone-864c-account-create-update-f9jhh\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") " pod="openstack/keystone-864c-account-create-update-f9jhh"
Dec 03 16:51:23 crc kubenswrapper[5002]: I1203 16:51:23.992486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzsks\" (UniqueName: \"kubernetes.io/projected/35fdb887-bef5-460d-af35-20096f2b736c-kube-api-access-mzsks\") pod \"keystone-db-create-gf624\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") " pod="openstack/keystone-db-create-gf624"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.032940 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gf624"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.072129 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e6cd-account-create-update-v4z66"]
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.072496 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ttj7\" (UniqueName: \"kubernetes.io/projected/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-kube-api-access-2ttj7\") pod \"placement-db-create-bj88z\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") " pod="openstack/placement-db-create-bj88z"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.072595 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-operator-scripts\") pod \"placement-db-create-bj88z\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") " pod="openstack/placement-db-create-bj88z"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.073675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-operator-scripts\") pod \"placement-db-create-bj88z\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") " pod="openstack/placement-db-create-bj88z"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.074007 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.077889 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.088231 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-864c-account-create-update-f9jhh"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.088956 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e6cd-account-create-update-v4z66"]
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.101203 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ttj7\" (UniqueName: \"kubernetes.io/projected/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-kube-api-access-2ttj7\") pod \"placement-db-create-bj88z\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") " pod="openstack/placement-db-create-bj88z"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.207141 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.250287 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bj88z"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.276416 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdf0c1c-4684-45ae-aedb-d91817e50576-operator-scripts\") pod \"placement-e6cd-account-create-update-v4z66\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") " pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.276480 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8nh2\" (UniqueName: \"kubernetes.io/projected/fbdf0c1c-4684-45ae-aedb-d91817e50576-kube-api-access-v8nh2\") pod \"placement-e6cd-account-create-update-v4z66\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") " pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.351414 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gf624"]
Dec 03 16:51:24 crc kubenswrapper[5002]: W1203 16:51:24.358065 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35fdb887_bef5_460d_af35_20096f2b736c.slice/crio-4f9a9c00ebe6dfe27b947ec0146b00dd5527f8a1daa645ba1035e98bc9742328 WatchSource:0}: Error finding container 4f9a9c00ebe6dfe27b947ec0146b00dd5527f8a1daa645ba1035e98bc9742328: Status 404 returned error can't find the container with id 4f9a9c00ebe6dfe27b947ec0146b00dd5527f8a1daa645ba1035e98bc9742328
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.378057 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdf0c1c-4684-45ae-aedb-d91817e50576-operator-scripts\") pod \"placement-e6cd-account-create-update-v4z66\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") " pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.378100 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8nh2\" (UniqueName: \"kubernetes.io/projected/fbdf0c1c-4684-45ae-aedb-d91817e50576-kube-api-access-v8nh2\") pod \"placement-e6cd-account-create-update-v4z66\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") " pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.379895 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdf0c1c-4684-45ae-aedb-d91817e50576-operator-scripts\") pod \"placement-e6cd-account-create-update-v4z66\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") " pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.399786 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8nh2\" (UniqueName: \"kubernetes.io/projected/fbdf0c1c-4684-45ae-aedb-d91817e50576-kube-api-access-v8nh2\") pod \"placement-e6cd-account-create-update-v4z66\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") " pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.446181 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.448474 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"5bb1594e739fe799aa82a8f48f853a7ecd9e6068fb751f01bd8cbf1ec5dfb599"}
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.450105 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gf624" event={"ID":"35fdb887-bef5-460d-af35-20096f2b736c","Type":"ContainerStarted","Data":"4f9a9c00ebe6dfe27b947ec0146b00dd5527f8a1daa645ba1035e98bc9742328"}
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.552249 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-gfsqx"]
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.554133 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gfsqx"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.570133 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gfsqx"]
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.640593 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-864c-account-create-update-f9jhh"]
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.686435 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75d1ca49-ec07-400e-89fd-cb277e813e98-operator-scripts\") pod \"glance-db-create-gfsqx\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " pod="openstack/glance-db-create-gfsqx"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.686523 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk475\" (UniqueName: \"kubernetes.io/projected/75d1ca49-ec07-400e-89fd-cb277e813e98-kube-api-access-sk475\") pod \"glance-db-create-gfsqx\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " pod="openstack/glance-db-create-gfsqx"
Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.746917 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-a518-account-create-update-d86mt"]
Need to start a new one" pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.763336 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.785367 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a518-account-create-update-d86mt"] Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.790711 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk475\" (UniqueName: \"kubernetes.io/projected/75d1ca49-ec07-400e-89fd-cb277e813e98-kube-api-access-sk475\") pod \"glance-db-create-gfsqx\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.790877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75d1ca49-ec07-400e-89fd-cb277e813e98-operator-scripts\") pod \"glance-db-create-gfsqx\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.791826 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75d1ca49-ec07-400e-89fd-cb277e813e98-operator-scripts\") pod \"glance-db-create-gfsqx\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.838801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk475\" (UniqueName: \"kubernetes.io/projected/75d1ca49-ec07-400e-89fd-cb277e813e98-kube-api-access-sk475\") pod \"glance-db-create-gfsqx\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.880553 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.892472 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zk4k\" (UniqueName: \"kubernetes.io/projected/b220588a-2f7f-4761-a3bc-d0021162cb10-kube-api-access-8zk4k\") pod \"glance-a518-account-create-update-d86mt\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.892563 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b220588a-2f7f-4761-a3bc-d0021162cb10-operator-scripts\") pod \"glance-a518-account-create-update-d86mt\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.895700 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bj88z"] Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.940950 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e6cd-account-create-update-v4z66"] Dec 03 16:51:24 crc kubenswrapper[5002]: W1203 16:51:24.950224 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbdf0c1c_4684_45ae_aedb_d91817e50576.slice/crio-2ceffe9a17972ec8b52d422aa82e3e0bb2c3553fbd000aba767cec869e9cb4b2 WatchSource:0}: Error finding container 2ceffe9a17972ec8b52d422aa82e3e0bb2c3553fbd000aba767cec869e9cb4b2: Status 404 returned error can't find the container with id 2ceffe9a17972ec8b52d422aa82e3e0bb2c3553fbd000aba767cec869e9cb4b2 Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.994688 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zk4k\" (UniqueName: \"kubernetes.io/projected/b220588a-2f7f-4761-a3bc-d0021162cb10-kube-api-access-8zk4k\") pod \"glance-a518-account-create-update-d86mt\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.995450 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b220588a-2f7f-4761-a3bc-d0021162cb10-operator-scripts\") pod \"glance-a518-account-create-update-d86mt\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:24 crc kubenswrapper[5002]: I1203 16:51:24.996790 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b220588a-2f7f-4761-a3bc-d0021162cb10-operator-scripts\") pod \"glance-a518-account-create-update-d86mt\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.023236 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zk4k\" (UniqueName: \"kubernetes.io/projected/b220588a-2f7f-4761-a3bc-d0021162cb10-kube-api-access-8zk4k\") pod \"glance-a518-account-create-update-d86mt\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.303333 5002 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.441236 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gfsqx"] Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.472644 5002 generic.go:334] "Generic (PLEG): container finished" podID="35fdb887-bef5-460d-af35-20096f2b736c" containerID="c82b727bf230901a00c848a679ec9867ca63d6aded4a708f898961e39f482557" exitCode=0 Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.472769 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gf624" event={"ID":"35fdb887-bef5-460d-af35-20096f2b736c","Type":"ContainerDied","Data":"c82b727bf230901a00c848a679ec9867ca63d6aded4a708f898961e39f482557"} Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.476411 5002 generic.go:334] "Generic (PLEG): container finished" podID="fbdf0c1c-4684-45ae-aedb-d91817e50576" containerID="0e6785c90ed2e6f4994bf2fbad1046ba48cc32cec76ca773e19797a995f30fbc" exitCode=0 Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.476693 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e6cd-account-create-update-v4z66" event={"ID":"fbdf0c1c-4684-45ae-aedb-d91817e50576","Type":"ContainerDied","Data":"0e6785c90ed2e6f4994bf2fbad1046ba48cc32cec76ca773e19797a995f30fbc"} Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.476711 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e6cd-account-create-update-v4z66" event={"ID":"fbdf0c1c-4684-45ae-aedb-d91817e50576","Type":"ContainerStarted","Data":"2ceffe9a17972ec8b52d422aa82e3e0bb2c3553fbd000aba767cec869e9cb4b2"} Dec 03 16:51:25 crc kubenswrapper[5002]: E1203 16:51:25.478481 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbdf0c1c_4684_45ae_aedb_d91817e50576.slice/crio-0e6785c90ed2e6f4994bf2fbad1046ba48cc32cec76ca773e19797a995f30fbc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c540d5c_7b6e_4fef_8aa9_bce2a4d75111.slice/crio-75263f4b67c20376ca0ea56d883165335fd8045a74dc621581aca247bdaa086c.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.484422 5002 generic.go:334] "Generic (PLEG): container finished" podID="0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" containerID="75263f4b67c20376ca0ea56d883165335fd8045a74dc621581aca247bdaa086c" exitCode=0 Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.484571 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bj88z" event={"ID":"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111","Type":"ContainerDied","Data":"75263f4b67c20376ca0ea56d883165335fd8045a74dc621581aca247bdaa086c"} Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.484612 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bj88z" event={"ID":"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111","Type":"ContainerStarted","Data":"05b8fa74928091b9fd219cb088e2700d117ad823ed2dd577c84838cd8c7e3092"} Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.491098 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" containerID="6da7b30f7c3a208596f000e6c7c17e6eb191359f5fb8ca48a30f22f6ee2a05c8" exitCode=0 Dec 
Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.491156 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-864c-account-create-update-f9jhh" event={"ID":"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f","Type":"ContainerDied","Data":"6da7b30f7c3a208596f000e6c7c17e6eb191359f5fb8ca48a30f22f6ee2a05c8"}
Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.491183 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-864c-account-create-update-f9jhh" event={"ID":"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f","Type":"ContainerStarted","Data":"b61eee3da9f3c3cc3e32f1e23f683aa85e289ba3d6b098d45150575611f39f36"}
Dec 03 16:51:25 crc kubenswrapper[5002]: W1203 16:51:25.711928 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75d1ca49_ec07_400e_89fd_cb277e813e98.slice/crio-69253c262b5efc4e5340cedca796972c60e057dd4efc20b3154c04e5d4ba5544 WatchSource:0}: Error finding container 69253c262b5efc4e5340cedca796972c60e057dd4efc20b3154c04e5d4ba5544: Status 404 returned error can't find the container with id 69253c262b5efc4e5340cedca796972c60e057dd4efc20b3154c04e5d4ba5544
Dec 03 16:51:25 crc kubenswrapper[5002]: I1203 16:51:25.771361 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a518-account-create-update-d86mt"]
Dec 03 16:51:25 crc kubenswrapper[5002]: W1203 16:51:25.782024 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb220588a_2f7f_4761_a3bc_d0021162cb10.slice/crio-5a5d37b32e120695d72e1ba921c368badddef4448009d1d9056a311d10f52ff1 WatchSource:0}: Error finding container 5a5d37b32e120695d72e1ba921c368badddef4448009d1d9056a311d10f52ff1: Status 404 returned error can't find the container with id 5a5d37b32e120695d72e1ba921c368badddef4448009d1d9056a311d10f52ff1
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.513589 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"295d415111976ea10f436b97bb0e928bbba6fc843889cfdd2399f44adfc4cf57"}
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.518935 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"fd82b64468e87bb52951423d662c96298a04577e50fb7dfae08cf95f6cb95f60"}
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.527065 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a518-account-create-update-d86mt" event={"ID":"b220588a-2f7f-4761-a3bc-d0021162cb10","Type":"ContainerStarted","Data":"e6fe04e95facb561195160dc1144f3a8f2fe4ee8638cfabeb6dd821713c5e1fa"}
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.527113 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a518-account-create-update-d86mt" event={"ID":"b220588a-2f7f-4761-a3bc-d0021162cb10","Type":"ContainerStarted","Data":"5a5d37b32e120695d72e1ba921c368badddef4448009d1d9056a311d10f52ff1"}
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.534409 5002 generic.go:334] "Generic (PLEG): container finished" podID="75d1ca49-ec07-400e-89fd-cb277e813e98" containerID="b1585b78bd2cce3b724558c2b02007ceae28e9bf1620d79698ce26bf316b9690" exitCode=0
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.534670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gfsqx" event={"ID":"75d1ca49-ec07-400e-89fd-cb277e813e98","Type":"ContainerDied","Data":"b1585b78bd2cce3b724558c2b02007ceae28e9bf1620d79698ce26bf316b9690"}
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.534706 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gfsqx" event={"ID":"75d1ca49-ec07-400e-89fd-cb277e813e98","Type":"ContainerStarted","Data":"69253c262b5efc4e5340cedca796972c60e057dd4efc20b3154c04e5d4ba5544"}
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.558307 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-a518-account-create-update-d86mt" podStartSLOduration=2.558280421 podStartE2EDuration="2.558280421s" podCreationTimestamp="2025-12-03 16:51:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:26.548970577 +0000 UTC m=+1209.962792455" watchObservedRunningTime="2025-12-03 16:51:26.558280421 +0000 UTC m=+1209.972102309"
Dec 03 16:51:26 crc kubenswrapper[5002]: I1203 16:51:26.981946 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gf624"
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.058888 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bj88z"
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.060609 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-864c-account-create-update-f9jhh"
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.068083 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e6cd-account-create-update-v4z66"
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.143689 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fdb887-bef5-460d-af35-20096f2b736c-operator-scripts\") pod \"35fdb887-bef5-460d-af35-20096f2b736c\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.143781 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vkct\" (UniqueName: \"kubernetes.io/projected/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-kube-api-access-4vkct\") pod \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.143816 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-operator-scripts\") pod \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.143836 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzsks\" (UniqueName: \"kubernetes.io/projected/35fdb887-bef5-460d-af35-20096f2b736c-kube-api-access-mzsks\") pod \"35fdb887-bef5-460d-af35-20096f2b736c\" (UID: \"35fdb887-bef5-460d-af35-20096f2b736c\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.143947 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdf0c1c-4684-45ae-aedb-d91817e50576-operator-scripts\") pod \"fbdf0c1c-4684-45ae-aedb-d91817e50576\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.144035 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8nh2\" (UniqueName: \"kubernetes.io/projected/fbdf0c1c-4684-45ae-aedb-d91817e50576-kube-api-access-v8nh2\") pod \"fbdf0c1c-4684-45ae-aedb-d91817e50576\" (UID: \"fbdf0c1c-4684-45ae-aedb-d91817e50576\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.144066 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ttj7\" (UniqueName: \"kubernetes.io/projected/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-kube-api-access-2ttj7\") pod \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\" (UID: \"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.144112 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-operator-scripts\") pod \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\" (UID: \"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f\") "
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.145043 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" (UID: "0c540d5c-7b6e-4fef-8aa9-bce2a4d75111"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.145134 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbdf0c1c-4684-45ae-aedb-d91817e50576-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fbdf0c1c-4684-45ae-aedb-d91817e50576" (UID: "fbdf0c1c-4684-45ae-aedb-d91817e50576"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.145237 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" (UID: "fa754386-78cd-4b7a-8e5d-ba61e4f1d03f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.145796 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35fdb887-bef5-460d-af35-20096f2b736c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35fdb887-bef5-460d-af35-20096f2b736c" (UID: "35fdb887-bef5-460d-af35-20096f2b736c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.152548 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbdf0c1c-4684-45ae-aedb-d91817e50576-kube-api-access-v8nh2" (OuterVolumeSpecName: "kube-api-access-v8nh2") pod "fbdf0c1c-4684-45ae-aedb-d91817e50576" (UID: "fbdf0c1c-4684-45ae-aedb-d91817e50576"). InnerVolumeSpecName "kube-api-access-v8nh2".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.152695 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-kube-api-access-4vkct" (OuterVolumeSpecName: "kube-api-access-4vkct") pod "fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" (UID: "fa754386-78cd-4b7a-8e5d-ba61e4f1d03f"). InnerVolumeSpecName "kube-api-access-4vkct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.152780 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-kube-api-access-2ttj7" (OuterVolumeSpecName: "kube-api-access-2ttj7") pod "0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" (UID: "0c540d5c-7b6e-4fef-8aa9-bce2a4d75111"). InnerVolumeSpecName "kube-api-access-2ttj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.152984 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35fdb887-bef5-460d-af35-20096f2b736c-kube-api-access-mzsks" (OuterVolumeSpecName: "kube-api-access-mzsks") pod "35fdb887-bef5-460d-af35-20096f2b736c" (UID: "35fdb887-bef5-460d-af35-20096f2b736c"). InnerVolumeSpecName "kube-api-access-mzsks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246365 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdf0c1c-4684-45ae-aedb-d91817e50576-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246401 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8nh2\" (UniqueName: \"kubernetes.io/projected/fbdf0c1c-4684-45ae-aedb-d91817e50576-kube-api-access-v8nh2\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246414 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ttj7\" (UniqueName: \"kubernetes.io/projected/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-kube-api-access-2ttj7\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246423 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246432 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fdb887-bef5-460d-af35-20096f2b736c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246441 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vkct\" (UniqueName: \"kubernetes.io/projected/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f-kube-api-access-4vkct\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246449 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.246460 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzsks\" (UniqueName: 
\"kubernetes.io/projected/35fdb887-bef5-460d-af35-20096f2b736c-kube-api-access-mzsks\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.416923 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.493972 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66c567d66c-5g2mp"] Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.494351 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerName="dnsmasq-dns" containerID="cri-o://569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3" gracePeriod=10 Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.561799 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bj88z" event={"ID":"0c540d5c-7b6e-4fef-8aa9-bce2a4d75111","Type":"ContainerDied","Data":"05b8fa74928091b9fd219cb088e2700d117ad823ed2dd577c84838cd8c7e3092"} Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.561846 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05b8fa74928091b9fd219cb088e2700d117ad823ed2dd577c84838cd8c7e3092" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.561813 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bj88z" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.563664 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-864c-account-create-update-f9jhh" event={"ID":"fa754386-78cd-4b7a-8e5d-ba61e4f1d03f","Type":"ContainerDied","Data":"b61eee3da9f3c3cc3e32f1e23f683aa85e289ba3d6b098d45150575611f39f36"} Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.563683 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b61eee3da9f3c3cc3e32f1e23f683aa85e289ba3d6b098d45150575611f39f36" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.563733 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-864c-account-create-update-f9jhh" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.571194 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gf624" event={"ID":"35fdb887-bef5-460d-af35-20096f2b736c","Type":"ContainerDied","Data":"4f9a9c00ebe6dfe27b947ec0146b00dd5527f8a1daa645ba1035e98bc9742328"} Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.571243 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f9a9c00ebe6dfe27b947ec0146b00dd5527f8a1daa645ba1035e98bc9742328" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.571302 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-gf624" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.584281 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e6cd-account-create-update-v4z66" event={"ID":"fbdf0c1c-4684-45ae-aedb-d91817e50576","Type":"ContainerDied","Data":"2ceffe9a17972ec8b52d422aa82e3e0bb2c3553fbd000aba767cec869e9cb4b2"} Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.584364 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ceffe9a17972ec8b52d422aa82e3e0bb2c3553fbd000aba767cec869e9cb4b2" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.590067 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e6cd-account-create-update-v4z66" Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.590444 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"4e9275333706b76f736d2575ea8bf99de9ae2e8e214e70763b642bd4b982174e"} Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.590517 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"8a698534e20e33c772ad47cef7ba71e2699abcc80985a3e4b85e7e699d61e5bc"} Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.592784 5002 generic.go:334] "Generic (PLEG): container finished" podID="b220588a-2f7f-4761-a3bc-d0021162cb10" containerID="e6fe04e95facb561195160dc1144f3a8f2fe4ee8638cfabeb6dd821713c5e1fa" exitCode=0 Dec 03 16:51:27 crc kubenswrapper[5002]: I1203 16:51:27.593038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a518-account-create-update-d86mt" event={"ID":"b220588a-2f7f-4761-a3bc-d0021162cb10","Type":"ContainerDied","Data":"e6fe04e95facb561195160dc1144f3a8f2fe4ee8638cfabeb6dd821713c5e1fa"} Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.085383 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.105920 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.180044 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75d1ca49-ec07-400e-89fd-cb277e813e98-operator-scripts\") pod \"75d1ca49-ec07-400e-89fd-cb277e813e98\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.180290 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsqk2\" (UniqueName: \"kubernetes.io/projected/63caefa0-1cea-4ecc-b8e4-6d579f56413c-kube-api-access-nsqk2\") pod \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.180323 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-config\") pod \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.180514 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75d1ca49-ec07-400e-89fd-cb277e813e98-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "75d1ca49-ec07-400e-89fd-cb277e813e98" (UID: "75d1ca49-ec07-400e-89fd-cb277e813e98"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.181235 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk475\" (UniqueName: \"kubernetes.io/projected/75d1ca49-ec07-400e-89fd-cb277e813e98-kube-api-access-sk475\") pod \"75d1ca49-ec07-400e-89fd-cb277e813e98\" (UID: \"75d1ca49-ec07-400e-89fd-cb277e813e98\") " Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.181285 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-dns-svc\") pod \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\" (UID: \"63caefa0-1cea-4ecc-b8e4-6d579f56413c\") " Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.181868 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75d1ca49-ec07-400e-89fd-cb277e813e98-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.185536 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75d1ca49-ec07-400e-89fd-cb277e813e98-kube-api-access-sk475" (OuterVolumeSpecName: "kube-api-access-sk475") pod "75d1ca49-ec07-400e-89fd-cb277e813e98" (UID: "75d1ca49-ec07-400e-89fd-cb277e813e98"). InnerVolumeSpecName "kube-api-access-sk475". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.196007 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63caefa0-1cea-4ecc-b8e4-6d579f56413c-kube-api-access-nsqk2" (OuterVolumeSpecName: "kube-api-access-nsqk2") pod "63caefa0-1cea-4ecc-b8e4-6d579f56413c" (UID: "63caefa0-1cea-4ecc-b8e4-6d579f56413c"). InnerVolumeSpecName "kube-api-access-nsqk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.232789 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "63caefa0-1cea-4ecc-b8e4-6d579f56413c" (UID: "63caefa0-1cea-4ecc-b8e4-6d579f56413c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.233088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-config" (OuterVolumeSpecName: "config") pod "63caefa0-1cea-4ecc-b8e4-6d579f56413c" (UID: "63caefa0-1cea-4ecc-b8e4-6d579f56413c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.284434 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsqk2\" (UniqueName: \"kubernetes.io/projected/63caefa0-1cea-4ecc-b8e4-6d579f56413c-kube-api-access-nsqk2\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.284491 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.284509 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk475\" (UniqueName: \"kubernetes.io/projected/75d1ca49-ec07-400e-89fd-cb277e813e98-kube-api-access-sk475\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.284521 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63caefa0-1cea-4ecc-b8e4-6d579f56413c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.608709 5002 generic.go:334] "Generic (PLEG): container finished" podID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerID="569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3" exitCode=0 Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.608829 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.608864 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" event={"ID":"63caefa0-1cea-4ecc-b8e4-6d579f56413c","Type":"ContainerDied","Data":"569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3"} Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.609080 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66c567d66c-5g2mp" event={"ID":"63caefa0-1cea-4ecc-b8e4-6d579f56413c","Type":"ContainerDied","Data":"ead2c500afe4972f20f1cad8b12e6877ea97b38de71bd612027bb565e8a74365"} Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.609264 5002 scope.go:117] "RemoveContainer" containerID="569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.611798 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-gfsqx" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.611806 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gfsqx" event={"ID":"75d1ca49-ec07-400e-89fd-cb277e813e98","Type":"ContainerDied","Data":"69253c262b5efc4e5340cedca796972c60e057dd4efc20b3154c04e5d4ba5544"} Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.611909 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69253c262b5efc4e5340cedca796972c60e057dd4efc20b3154c04e5d4ba5544" Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.656366 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66c567d66c-5g2mp"] Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.665256 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66c567d66c-5g2mp"] Dec 03 16:51:28 crc kubenswrapper[5002]: I1203 16:51:28.865857 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" path="/var/lib/kubelet/pods/63caefa0-1cea-4ecc-b8e4-6d579f56413c/volumes" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.009912 5002 scope.go:117] "RemoveContainer" containerID="d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.074716 5002 scope.go:117] "RemoveContainer" containerID="569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3" Dec 03 16:51:29 crc kubenswrapper[5002]: E1203 16:51:29.075665 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3\": container with ID starting with 569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3 not found: ID does not exist" containerID="569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.075906 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3"} err="failed to get container status \"569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3\": rpc error: code = NotFound desc = could not find container \"569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3\": container with ID starting with 569e24ab61708e3b02629fbe03adc576f9da4091ead7f539350e6967216cebd3 not found: ID does not exist" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.075985 5002 scope.go:117] "RemoveContainer" containerID="d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca" Dec 03 16:51:29 crc kubenswrapper[5002]: E1203 16:51:29.076842 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca\": container with ID starting with d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca not found: ID does not exist" containerID="d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.076915 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca"} err="failed to get container status 
\"d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca\": rpc error: code = NotFound desc = could not find container \"d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca\": container with ID starting with d02c07bb87547adf1a99e7e9f5b38133d510168eec4b3d128325dfbc32c027ca not found: ID does not exist" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.105857 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.204472 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zk4k\" (UniqueName: \"kubernetes.io/projected/b220588a-2f7f-4761-a3bc-d0021162cb10-kube-api-access-8zk4k\") pod \"b220588a-2f7f-4761-a3bc-d0021162cb10\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.204625 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b220588a-2f7f-4761-a3bc-d0021162cb10-operator-scripts\") pod \"b220588a-2f7f-4761-a3bc-d0021162cb10\" (UID: \"b220588a-2f7f-4761-a3bc-d0021162cb10\") " Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.205596 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b220588a-2f7f-4761-a3bc-d0021162cb10-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b220588a-2f7f-4761-a3bc-d0021162cb10" (UID: "b220588a-2f7f-4761-a3bc-d0021162cb10"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.212350 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b220588a-2f7f-4761-a3bc-d0021162cb10-kube-api-access-8zk4k" (OuterVolumeSpecName: "kube-api-access-8zk4k") pod "b220588a-2f7f-4761-a3bc-d0021162cb10" (UID: "b220588a-2f7f-4761-a3bc-d0021162cb10"). InnerVolumeSpecName "kube-api-access-8zk4k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.307077 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b220588a-2f7f-4761-a3bc-d0021162cb10-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.307598 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zk4k\" (UniqueName: \"kubernetes.io/projected/b220588a-2f7f-4761-a3bc-d0021162cb10-kube-api-access-8zk4k\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.631805 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"a95935c6703da840b357416ca976f8c112b4e33b4bab1975af78cf849c48467d"} Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.631853 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"a869513d51677c3c369edfd440f7ae36fb809c1de1a7e02989a558f03d0af6af"} Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.636163 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a518-account-create-update-d86mt" event={"ID":"b220588a-2f7f-4761-a3bc-d0021162cb10","Type":"ContainerDied","Data":"5a5d37b32e120695d72e1ba921c368badddef4448009d1d9056a311d10f52ff1"} Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.636228 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a5d37b32e120695d72e1ba921c368badddef4448009d1d9056a311d10f52ff1" Dec 03 16:51:29 crc kubenswrapper[5002]: I1203 16:51:29.636251 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a518-account-create-update-d86mt" Dec 03 16:51:30 crc kubenswrapper[5002]: I1203 16:51:30.651386 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"90c36b07d1cc9b89cddb1a2322982944e4e056074c328fde3c02146dc0e50229"} Dec 03 16:51:30 crc kubenswrapper[5002]: I1203 16:51:30.651996 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"74b9cad1cdf521c7bfb58575456d72fa698d2f033219c2bcb6eeb10f75b16c25"} Dec 03 16:51:31 crc kubenswrapper[5002]: I1203 16:51:31.682209 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"818f3df7318a7e5d0b01bd79d58ad702a7eb69ce025a94d68c50fb54ed6f4b7d"} Dec 03 16:51:31 crc kubenswrapper[5002]: I1203 16:51:31.682863 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"56b7bbd59ef17d3e48ad12ed59f364881c2e4bfa9e7ece40383f0ef190962e10"} Dec 03 16:51:31 crc kubenswrapper[5002]: I1203 16:51:31.682885 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"9d9bb1df438ed50da59ac696f69efde7c6a48d1828bcb3dd1620fd321d2b4d34"} Dec 03 16:51:32 crc kubenswrapper[5002]: I1203 16:51:32.714000 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"b8210908d5d09bcdac92e5295a766ab5588450583f98221265c5dc1fcef219e0"} Dec 03 16:51:32 crc kubenswrapper[5002]: I1203 16:51:32.714063 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"51ef41e140012493084ca9a5ee4771bb67457963ca1eb5c801a48e1b0525b81d"} Dec 03 16:51:32 crc kubenswrapper[5002]: I1203 16:51:32.714073 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"2259d0bf57741cf43caa6dace1c5a1419cb7906850811728b72c40313b3bf897"} Dec 03 16:51:32 crc kubenswrapper[5002]: I1203 16:51:32.714083 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerStarted","Data":"14c3879759c2b66a2378417de3744de65dc49d534bafb30583646388375fa453"} Dec 03 16:51:32 crc kubenswrapper[5002]: I1203 16:51:32.799008 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.106134371 podStartE2EDuration="26.798983053s" podCreationTimestamp="2025-12-03 16:51:06 +0000 UTC" firstStartedPulling="2025-12-03 16:51:24.234331083 +0000 UTC m=+1207.648152971" lastFinishedPulling="2025-12-03 16:51:30.927179735 +0000 UTC m=+1214.341001653" observedRunningTime="2025-12-03 16:51:32.783358988 +0000 UTC m=+1216.197180886" watchObservedRunningTime="2025-12-03 16:51:32.798983053 +0000 UTC m=+1216.212804951" Dec 03 16:51:32 crc kubenswrapper[5002]: I1203 16:51:32.983979 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/ovn-northd-0" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.137910 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-864b648dc7-x77ct"] Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138728 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138768 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138784 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138792 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138805 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerName="init" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138811 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerName="init" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138832 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35fdb887-bef5-460d-af35-20096f2b736c" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138838 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="35fdb887-bef5-460d-af35-20096f2b736c" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138848 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b220588a-2f7f-4761-a3bc-d0021162cb10" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138854 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b220588a-2f7f-4761-a3bc-d0021162cb10" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138890 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerName="dnsmasq-dns" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138896 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerName="dnsmasq-dns" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138906 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d1ca49-ec07-400e-89fd-cb277e813e98" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138912 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d1ca49-ec07-400e-89fd-cb277e813e98" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: E1203 16:51:33.138928 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbdf0c1c-4684-45ae-aedb-d91817e50576" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.138934 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbdf0c1c-4684-45ae-aedb-d91817e50576" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139102 5002 
memory_manager.go:354] "RemoveStaleState removing state" podUID="63caefa0-1cea-4ecc-b8e4-6d579f56413c" containerName="dnsmasq-dns" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139119 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139134 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbdf0c1c-4684-45ae-aedb-d91817e50576" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139147 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d1ca49-ec07-400e-89fd-cb277e813e98" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139157 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139169 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="35fdb887-bef5-460d-af35-20096f2b736c" containerName="mariadb-database-create" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.139184 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b220588a-2f7f-4761-a3bc-d0021162cb10" containerName="mariadb-account-create-update" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.140218 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.153316 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.174141 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-864b648dc7-x77ct"] Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.306311 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-nb\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.306443 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-config\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.306488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-svc\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.306604 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf2nd\" (UniqueName: \"kubernetes.io/projected/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-kube-api-access-gf2nd\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 
16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.306665 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-swift-storage-0\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.306724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-sb\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.408551 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-swift-storage-0\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.408633 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-sb\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.408691 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-nb\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.408714 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-config\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.408767 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-svc\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.408813 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf2nd\" (UniqueName: \"kubernetes.io/projected/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-kube-api-access-gf2nd\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.410246 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-swift-storage-0\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 
crc kubenswrapper[5002]: I1203 16:51:33.411345 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-config\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.411402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-nb\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.412604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-sb\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.412630 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-svc\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.436741 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf2nd\" (UniqueName: \"kubernetes.io/projected/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-kube-api-access-gf2nd\") pod \"dnsmasq-dns-864b648dc7-x77ct\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") " pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.473162 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:33 crc kubenswrapper[5002]: I1203 16:51:33.954720 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-864b648dc7-x77ct"] Dec 03 16:51:33 crc kubenswrapper[5002]: W1203 16:51:33.968069 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b410b16_abbb_4b0e_ada3_70dd42a11ec2.slice/crio-907f18e4ee3f9cce1b51760fe79d46508c178db99e8e519f1435e69394685145 WatchSource:0}: Error finding container 907f18e4ee3f9cce1b51760fe79d46508c178db99e8e519f1435e69394685145: Status 404 returned error can't find the container with id 907f18e4ee3f9cce1b51760fe79d46508c178db99e8e519f1435e69394685145 Dec 03 16:51:34 crc kubenswrapper[5002]: I1203 16:51:34.745337 5002 generic.go:334] "Generic (PLEG): container finished" podID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerID="4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919" exitCode=0 Dec 03 16:51:34 crc kubenswrapper[5002]: I1203 16:51:34.745589 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"382d6556-c45b-43dd-a4fa-16b3e91e0725","Type":"ContainerDied","Data":"4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919"} Dec 03 16:51:34 crc kubenswrapper[5002]: I1203 16:51:34.750689 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" event={"ID":"2b410b16-abbb-4b0e-ada3-70dd42a11ec2","Type":"ContainerStarted","Data":"c2d24d81949b545d3ebc7b32c1c2b0e1fdeeedd331dbf8e044d0abff41ed4938"} Dec 03 16:51:34 crc kubenswrapper[5002]: I1203 16:51:34.750762 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" event={"ID":"2b410b16-abbb-4b0e-ada3-70dd42a11ec2","Type":"ContainerStarted","Data":"907f18e4ee3f9cce1b51760fe79d46508c178db99e8e519f1435e69394685145"} Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.109952 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-t6r47"] Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.111555 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.115433 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.117582 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-njd5b" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.126682 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-t6r47"] Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.248988 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-config-data\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.249520 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqg4f\" (UniqueName: \"kubernetes.io/projected/cf3779ea-e418-4c90-9c5e-74e0c8590c75-kube-api-access-dqg4f\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.249556 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-combined-ca-bundle\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.249586 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-db-sync-config-data\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.351905 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqg4f\" (UniqueName: \"kubernetes.io/projected/cf3779ea-e418-4c90-9c5e-74e0c8590c75-kube-api-access-dqg4f\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.352037 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-combined-ca-bundle\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.352103 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-db-sync-config-data\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.352210 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-config-data\") pod 
\"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.356869 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-db-sync-config-data\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.357127 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-config-data\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.357536 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-combined-ca-bundle\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.370821 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqg4f\" (UniqueName: \"kubernetes.io/projected/cf3779ea-e418-4c90-9c5e-74e0c8590c75-kube-api-access-dqg4f\") pod \"glance-db-sync-t6r47\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") " pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.472031 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-t6r47" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.767080 5002 generic.go:334] "Generic (PLEG): container finished" podID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerID="5caff8192bc34681b3f0760b1b195bcfacf6add52d3a669e3d1acea1cb2ca939" exitCode=0 Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.767159 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7a598f1-0f32-448c-b08f-b5b0e70f583d","Type":"ContainerDied","Data":"5caff8192bc34681b3f0760b1b195bcfacf6add52d3a669e3d1acea1cb2ca939"} Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.775851 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"382d6556-c45b-43dd-a4fa-16b3e91e0725","Type":"ContainerStarted","Data":"4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8"} Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.777084 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.783454 5002 generic.go:334] "Generic (PLEG): container finished" podID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerID="c2d24d81949b545d3ebc7b32c1c2b0e1fdeeedd331dbf8e044d0abff41ed4938" exitCode=0 Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.783548 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" event={"ID":"2b410b16-abbb-4b0e-ada3-70dd42a11ec2","Type":"ContainerDied","Data":"c2d24d81949b545d3ebc7b32c1c2b0e1fdeeedd331dbf8e044d0abff41ed4938"} Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.783994 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 
16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.784111 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" event={"ID":"2b410b16-abbb-4b0e-ada3-70dd42a11ec2","Type":"ContainerStarted","Data":"03b89cb2436ac0c1009dc99d0feaff080ad885db9e3bda35aa9432496182be09"} Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.844868 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" podStartSLOduration=2.844841708 podStartE2EDuration="2.844841708s" podCreationTimestamp="2025-12-03 16:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:35.83019746 +0000 UTC m=+1219.244019348" watchObservedRunningTime="2025-12-03 16:51:35.844841708 +0000 UTC m=+1219.258663606" Dec 03 16:51:35 crc kubenswrapper[5002]: I1203 16:51:35.885507 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=43.495226887 podStartE2EDuration="1m6.885474123s" podCreationTimestamp="2025-12-03 16:50:29 +0000 UTC" firstStartedPulling="2025-12-03 16:50:31.393741373 +0000 UTC m=+1154.807563261" lastFinishedPulling="2025-12-03 16:50:54.783988579 +0000 UTC m=+1178.197810497" observedRunningTime="2025-12-03 16:51:35.862626542 +0000 UTC m=+1219.276448460" watchObservedRunningTime="2025-12-03 16:51:35.885474123 +0000 UTC m=+1219.299296021" Dec 03 16:51:36 crc kubenswrapper[5002]: I1203 16:51:36.037729 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-t6r47"] Dec 03 16:51:36 crc kubenswrapper[5002]: W1203 16:51:36.047562 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf3779ea_e418_4c90_9c5e_74e0c8590c75.slice/crio-aec162f313774e4a42a64e6797278708ced8f698c2aa74be954ad7da7e1ac698 WatchSource:0}: Error finding container aec162f313774e4a42a64e6797278708ced8f698c2aa74be954ad7da7e1ac698: Status 404 returned error can't find the container with id aec162f313774e4a42a64e6797278708ced8f698c2aa74be954ad7da7e1ac698 Dec 03 16:51:36 crc kubenswrapper[5002]: I1203 16:51:36.796830 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t6r47" event={"ID":"cf3779ea-e418-4c90-9c5e-74e0c8590c75","Type":"ContainerStarted","Data":"aec162f313774e4a42a64e6797278708ced8f698c2aa74be954ad7da7e1ac698"} Dec 03 16:51:36 crc kubenswrapper[5002]: I1203 16:51:36.803555 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7a598f1-0f32-448c-b08f-b5b0e70f583d","Type":"ContainerStarted","Data":"d73705cc9fa58d7cab153dcce4358e73807cf1f2ea894bb4e2604b295891a9c0"} Dec 03 16:51:36 crc kubenswrapper[5002]: I1203 16:51:36.803953 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:51:36 crc kubenswrapper[5002]: I1203 16:51:36.847406 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=44.61187985 podStartE2EDuration="1m7.847383299s" podCreationTimestamp="2025-12-03 16:50:29 +0000 UTC" firstStartedPulling="2025-12-03 16:50:31.696224459 +0000 UTC m=+1155.110046377" lastFinishedPulling="2025-12-03 16:50:54.931727938 +0000 UTC m=+1178.345549826" observedRunningTime="2025-12-03 16:51:36.845835897 +0000 UTC m=+1220.259657795" 
watchObservedRunningTime="2025-12-03 16:51:36.847383299 +0000 UTC m=+1220.261205187" Dec 03 16:51:39 crc kubenswrapper[5002]: I1203 16:51:39.215630 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" probeResult="failure" output=< Dec 03 16:51:39 crc kubenswrapper[5002]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 16:51:39 crc kubenswrapper[5002]: > Dec 03 16:51:39 crc kubenswrapper[5002]: I1203 16:51:39.289649 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:51:43 crc kubenswrapper[5002]: I1203 16:51:43.474955 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" Dec 03 16:51:43 crc kubenswrapper[5002]: I1203 16:51:43.550618 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"] Dec 03 16:51:43 crc kubenswrapper[5002]: I1203 16:51:43.550966 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="dnsmasq-dns" containerID="cri-o://fe2374b4357280cf4c7bbab3ad1bad480db444706c268289717988a39d61762d" gracePeriod=10 Dec 03 16:51:43 crc kubenswrapper[5002]: I1203 16:51:43.889498 5002 generic.go:334] "Generic (PLEG): container finished" podID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerID="fe2374b4357280cf4c7bbab3ad1bad480db444706c268289717988a39d61762d" exitCode=0 Dec 03 16:51:43 crc kubenswrapper[5002]: I1203 16:51:43.889564 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" event={"ID":"60bfa9cd-a2dd-4460-94b8-495a002dbf89","Type":"ContainerDied","Data":"fe2374b4357280cf4c7bbab3ad1bad480db444706c268289717988a39d61762d"} Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.198670 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" probeResult="failure" output=< Dec 03 16:51:44 crc kubenswrapper[5002]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 16:51:44 crc kubenswrapper[5002]: > Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.281473 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.613621 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hnkdk-config-6tmmk"] Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.615074 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.619409 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.627117 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hnkdk-config-6tmmk"] Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.764574 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fd26\" (UniqueName: \"kubernetes.io/projected/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-kube-api-access-4fd26\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.764878 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-log-ovn\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.765345 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run-ovn\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.765487 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.765550 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-scripts\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.765579 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-additional-scripts\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.869312 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fd26\" (UniqueName: \"kubernetes.io/projected/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-kube-api-access-4fd26\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.869428 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-log-ovn\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.869551 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run-ovn\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.869616 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.869645 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-scripts\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.869704 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-additional-scripts\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.870949 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-additional-scripts\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.871400 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run-ovn\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.871500 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-log-ovn\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.871525 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.875174 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-scripts\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.914968 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fd26\" (UniqueName: \"kubernetes.io/projected/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-kube-api-access-4fd26\") pod \"ovn-controller-hnkdk-config-6tmmk\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:44 crc kubenswrapper[5002]: I1203 16:51:44.947464 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:47 crc kubenswrapper[5002]: I1203 16:51:47.415644 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: connect: connection refused" Dec 03 16:51:49 crc kubenswrapper[5002]: I1203 16:51:49.203302 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" probeResult="failure" output=< Dec 03 16:51:49 crc kubenswrapper[5002]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 16:51:49 crc kubenswrapper[5002]: > Dec 03 16:51:50 crc kubenswrapper[5002]: I1203 16:51:50.765192 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 16:51:50 crc kubenswrapper[5002]: I1203 16:51:50.919264 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:51:50 crc kubenswrapper[5002]: I1203 16:51:50.919326 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.164290 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.305629 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-nnvht"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.307308 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.339687 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5ccf-account-create-update-t42bt"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.343583 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nnvht"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.343691 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.353689 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5ccf-account-create-update-t42bt"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.367270 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.423380 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6vv7m"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.426353 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.430842 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g9tr\" (UniqueName: \"kubernetes.io/projected/6dde11f9-1724-4644-bba1-ab2e8efdee35-kube-api-access-5g9tr\") pod \"cinder-5ccf-account-create-update-t42bt\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.430902 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6dde11f9-1724-4644-bba1-ab2e8efdee35-operator-scripts\") pod \"cinder-5ccf-account-create-update-t42bt\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.430995 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fr4z\" (UniqueName: \"kubernetes.io/projected/63afe065-7146-41e8-b5d9-898d9492f88e-kube-api-access-5fr4z\") pod \"cinder-db-create-nnvht\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.431036 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63afe065-7146-41e8-b5d9-898d9492f88e-operator-scripts\") pod \"cinder-db-create-nnvht\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: E1203 16:51:51.433849 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:481073ac9deefb38bbd989aaa8dd7aedb4e0af26017f4883f85fce433380bf63" Dec 03 16:51:51 crc kubenswrapper[5002]: E1203 16:51:51.434318 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:481073ac9deefb38bbd989aaa8dd7aedb4e0af26017f4883f85fce433380bf63,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dqg4f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-t6r47_openstack(cf3779ea-e418-4c90-9c5e-74e0c8590c75): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:51:51 crc kubenswrapper[5002]: E1203 16:51:51.435773 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-t6r47" podUID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.533321 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fr4z\" (UniqueName: \"kubernetes.io/projected/63afe065-7146-41e8-b5d9-898d9492f88e-kube-api-access-5fr4z\") pod \"cinder-db-create-nnvht\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.533397 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kffzf\" (UniqueName: \"kubernetes.io/projected/b47a6833-493a-4bcf-9287-617fd78f4c40-kube-api-access-kffzf\") pod \"barbican-db-create-6vv7m\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.533436 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63afe065-7146-41e8-b5d9-898d9492f88e-operator-scripts\") pod \"cinder-db-create-nnvht\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " 
pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.533504 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b47a6833-493a-4bcf-9287-617fd78f4c40-operator-scripts\") pod \"barbican-db-create-6vv7m\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.533538 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g9tr\" (UniqueName: \"kubernetes.io/projected/6dde11f9-1724-4644-bba1-ab2e8efdee35-kube-api-access-5g9tr\") pod \"cinder-5ccf-account-create-update-t42bt\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.533569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6dde11f9-1724-4644-bba1-ab2e8efdee35-operator-scripts\") pod \"cinder-5ccf-account-create-update-t42bt\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.539071 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6dde11f9-1724-4644-bba1-ab2e8efdee35-operator-scripts\") pod \"cinder-5ccf-account-create-update-t42bt\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.539614 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8d7a-account-create-update-dnhkq"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.541189 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63afe065-7146-41e8-b5d9-898d9492f88e-operator-scripts\") pod \"cinder-db-create-nnvht\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.541908 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.549460 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.552244 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6vv7m"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.559800 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8d7a-account-create-update-dnhkq"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.582261 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fr4z\" (UniqueName: \"kubernetes.io/projected/63afe065-7146-41e8-b5d9-898d9492f88e-kube-api-access-5fr4z\") pod \"cinder-db-create-nnvht\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.594322 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g9tr\" (UniqueName: \"kubernetes.io/projected/6dde11f9-1724-4644-bba1-ab2e8efdee35-kube-api-access-5g9tr\") pod \"cinder-5ccf-account-create-update-t42bt\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.642078 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39fcd9c-ea0f-4426-b6ec-97058995e105-operator-scripts\") pod \"barbican-8d7a-account-create-update-dnhkq\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.642207 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kffzf\" (UniqueName: \"kubernetes.io/projected/b47a6833-493a-4bcf-9287-617fd78f4c40-kube-api-access-kffzf\") pod \"barbican-db-create-6vv7m\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.642297 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b47a6833-493a-4bcf-9287-617fd78f4c40-operator-scripts\") pod \"barbican-db-create-6vv7m\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.642342 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8jcx\" (UniqueName: \"kubernetes.io/projected/e39fcd9c-ea0f-4426-b6ec-97058995e105-kube-api-access-l8jcx\") pod \"barbican-8d7a-account-create-update-dnhkq\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.653799 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b47a6833-493a-4bcf-9287-617fd78f4c40-operator-scripts\") pod \"barbican-db-create-6vv7m\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.672411 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-kffzf\" (UniqueName: \"kubernetes.io/projected/b47a6833-493a-4bcf-9287-617fd78f4c40-kube-api-access-kffzf\") pod \"barbican-db-create-6vv7m\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.716813 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-x6tvw"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.718284 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.734956 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b96b-account-create-update-bnk5v"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.736589 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.744927 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39fcd9c-ea0f-4426-b6ec-97058995e105-operator-scripts\") pod \"barbican-8d7a-account-create-update-dnhkq\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.745021 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8jcx\" (UniqueName: \"kubernetes.io/projected/e39fcd9c-ea0f-4426-b6ec-97058995e105-kube-api-access-l8jcx\") pod \"barbican-8d7a-account-create-update-dnhkq\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.746154 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39fcd9c-ea0f-4426-b6ec-97058995e105-operator-scripts\") pod \"barbican-8d7a-account-create-update-dnhkq\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.751098 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.756819 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-x6tvw"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.774371 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-76gsx"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.775795 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.781515 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.781963 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6l4ll" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.782111 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.782215 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.782784 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.810619 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b96b-account-create-update-bnk5v"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.826595 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8jcx\" (UniqueName: \"kubernetes.io/projected/e39fcd9c-ea0f-4426-b6ec-97058995e105-kube-api-access-l8jcx\") pod \"barbican-8d7a-account-create-update-dnhkq\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.827516 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.843964 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.847992 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-config-data\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.848109 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-operator-scripts\") pod \"neutron-db-create-x6tvw\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.848153 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skcpg\" (UniqueName: \"kubernetes.io/projected/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-kube-api-access-skcpg\") pod \"neutron-db-create-x6tvw\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.848183 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-combined-ca-bundle\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.848232 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmbxl\" (UniqueName: \"kubernetes.io/projected/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-kube-api-access-rmbxl\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.848257 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/684116e4-56a2-4ac4-b802-d342d50db63f-operator-scripts\") pod \"neutron-b96b-account-create-update-bnk5v\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.848310 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz27w\" (UniqueName: \"kubernetes.io/projected/684116e4-56a2-4ac4-b802-d342d50db63f-kube-api-access-jz27w\") pod \"neutron-b96b-account-create-update-bnk5v\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.872280 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.941921 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-76gsx"] Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950414 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-config-data\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950480 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-operator-scripts\") pod \"neutron-db-create-x6tvw\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950504 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skcpg\" (UniqueName: \"kubernetes.io/projected/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-kube-api-access-skcpg\") pod \"neutron-db-create-x6tvw\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950524 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-combined-ca-bundle\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950561 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/684116e4-56a2-4ac4-b802-d342d50db63f-operator-scripts\") pod \"neutron-b96b-account-create-update-bnk5v\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950581 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmbxl\" (UniqueName: \"kubernetes.io/projected/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-kube-api-access-rmbxl\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.950629 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz27w\" (UniqueName: \"kubernetes.io/projected/684116e4-56a2-4ac4-b802-d342d50db63f-kube-api-access-jz27w\") pod \"neutron-b96b-account-create-update-bnk5v\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.953309 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/684116e4-56a2-4ac4-b802-d342d50db63f-operator-scripts\") pod \"neutron-b96b-account-create-update-bnk5v\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.958261 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-operator-scripts\") pod \"neutron-db-create-x6tvw\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.964667 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-combined-ca-bundle\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.964742 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-config-data\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:51 crc kubenswrapper[5002]: I1203 16:51:51.982711 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz27w\" (UniqueName: \"kubernetes.io/projected/684116e4-56a2-4ac4-b802-d342d50db63f-kube-api-access-jz27w\") pod \"neutron-b96b-account-create-update-bnk5v\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.006685 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skcpg\" (UniqueName: \"kubernetes.io/projected/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-kube-api-access-skcpg\") pod \"neutron-db-create-x6tvw\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.011051 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmbxl\" (UniqueName: \"kubernetes.io/projected/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-kube-api-access-rmbxl\") pod \"keystone-db-sync-76gsx\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:52 crc kubenswrapper[5002]: E1203 16:51:52.067084 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:481073ac9deefb38bbd989aaa8dd7aedb4e0af26017f4883f85fce433380bf63\\\"\"" pod="openstack/glance-db-sync-t6r47" podUID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.070334 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.076171 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hnkdk-config-6tmmk"] Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.117594 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.152938 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.156336 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-76gsx" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.255827 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmsg2\" (UniqueName: \"kubernetes.io/projected/60bfa9cd-a2dd-4460-94b8-495a002dbf89-kube-api-access-dmsg2\") pod \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.255895 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-config\") pod \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.255925 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-nb\") pod \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.255992 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-dns-svc\") pod \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.256025 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-sb\") pod \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\" (UID: \"60bfa9cd-a2dd-4460-94b8-495a002dbf89\") " Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.283251 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60bfa9cd-a2dd-4460-94b8-495a002dbf89-kube-api-access-dmsg2" (OuterVolumeSpecName: "kube-api-access-dmsg2") pod "60bfa9cd-a2dd-4460-94b8-495a002dbf89" (UID: "60bfa9cd-a2dd-4460-94b8-495a002dbf89"). InnerVolumeSpecName "kube-api-access-dmsg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.338806 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-config" (OuterVolumeSpecName: "config") pod "60bfa9cd-a2dd-4460-94b8-495a002dbf89" (UID: "60bfa9cd-a2dd-4460-94b8-495a002dbf89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.343786 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "60bfa9cd-a2dd-4460-94b8-495a002dbf89" (UID: "60bfa9cd-a2dd-4460-94b8-495a002dbf89"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.358296 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmsg2\" (UniqueName: \"kubernetes.io/projected/60bfa9cd-a2dd-4460-94b8-495a002dbf89-kube-api-access-dmsg2\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.358345 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.358356 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.382658 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "60bfa9cd-a2dd-4460-94b8-495a002dbf89" (UID: "60bfa9cd-a2dd-4460-94b8-495a002dbf89"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.401249 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "60bfa9cd-a2dd-4460-94b8-495a002dbf89" (UID: "60bfa9cd-a2dd-4460-94b8-495a002dbf89"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.461993 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:52 crc kubenswrapper[5002]: I1203 16:51:52.462413 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60bfa9cd-a2dd-4460-94b8-495a002dbf89-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:52.814487 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nnvht"] Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:52.860759 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6vv7m"] Dec 03 16:51:53 crc kubenswrapper[5002]: W1203 16:51:52.878814 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb47a6833_493a_4bcf_9287_617fd78f4c40.slice/crio-f7967f1ce2f517516f200a6a8ac856b10270c44a0d0017f23ecc90b51eb4d9de WatchSource:0}: Error finding container f7967f1ce2f517516f200a6a8ac856b10270c44a0d0017f23ecc90b51eb4d9de: Status 404 returned error can't find the container with id f7967f1ce2f517516f200a6a8ac856b10270c44a0d0017f23ecc90b51eb4d9de Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:52.890757 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5ccf-account-create-update-t42bt"] Dec 03 16:51:53 crc kubenswrapper[5002]: W1203 16:51:52.895390 5002 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dde11f9_1724_4644_bba1_ab2e8efdee35.slice/crio-9ac7cec535c75b5c32eada4ab5cbe7ce450a81e79687eeaae863a2a90ed9b1ef WatchSource:0}: Error finding container 9ac7cec535c75b5c32eada4ab5cbe7ce450a81e79687eeaae863a2a90ed9b1ef: Status 404 returned error can't find the container with id 9ac7cec535c75b5c32eada4ab5cbe7ce450a81e79687eeaae863a2a90ed9b1ef Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.098361 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk-config-6tmmk" event={"ID":"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb","Type":"ContainerStarted","Data":"5e5785af5ff0258b90954215a1483b5a6f295554f2fbe146b89f4c987082a912"} Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.098450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk-config-6tmmk" event={"ID":"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb","Type":"ContainerStarted","Data":"7a7b92225076e15d4d34e07ec5742912b4d12e881a857e218f5b5ec594f8204c"} Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.127803 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" event={"ID":"60bfa9cd-a2dd-4460-94b8-495a002dbf89","Type":"ContainerDied","Data":"d1923fdf0ab332898148191aaba3ecd4ecc2c6310ef8790cb287181efeac37be"} Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.127881 5002 scope.go:117] "RemoveContainer" containerID="fe2374b4357280cf4c7bbab3ad1bad480db444706c268289717988a39d61762d" Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.128146 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5fbdd8c-6nt4j" Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.138254 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6vv7m" event={"ID":"b47a6833-493a-4bcf-9287-617fd78f4c40","Type":"ContainerStarted","Data":"f7967f1ce2f517516f200a6a8ac856b10270c44a0d0017f23ecc90b51eb4d9de"} Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.142408 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-hnkdk-config-6tmmk" podStartSLOduration=9.142392154 podStartE2EDuration="9.142392154s" podCreationTimestamp="2025-12-03 16:51:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:53.126359488 +0000 UTC m=+1236.540181376" watchObservedRunningTime="2025-12-03 16:51:53.142392154 +0000 UTC m=+1236.556214042" Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.155163 5002 scope.go:117] "RemoveContainer" containerID="02e69d3601c9b6e85702d77583a2b6d167247409b34fa1a2db136d575b2e3286" Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.160719 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5ccf-account-create-update-t42bt" event={"ID":"6dde11f9-1724-4644-bba1-ab2e8efdee35","Type":"ContainerStarted","Data":"9ac7cec535c75b5c32eada4ab5cbe7ce450a81e79687eeaae863a2a90ed9b1ef"} Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.164789 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nnvht" event={"ID":"63afe065-7146-41e8-b5d9-898d9492f88e","Type":"ContainerStarted","Data":"00ac687bed7ef74a015c208005c87eae6d29b96f17d548863db8f6560319ef92"} Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.185155 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"] Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.200292 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d5fbdd8c-6nt4j"] Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.202646 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-nnvht" podStartSLOduration=2.202616732 podStartE2EDuration="2.202616732s" podCreationTimestamp="2025-12-03 16:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:53.184051767 +0000 UTC m=+1236.597873655" watchObservedRunningTime="2025-12-03 16:51:53.202616732 +0000 UTC m=+1236.616438620" Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.711781 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8d7a-account-create-update-dnhkq"] Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.723293 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-x6tvw"] Dec 03 16:51:53 crc kubenswrapper[5002]: W1203 16:51:53.747817 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode39fcd9c_ea0f_4426_b6ec_97058995e105.slice/crio-7ed3cb2605b4f41f020bb71330d512513e73803c47ea1721033874502206a019 WatchSource:0}: Error finding container 7ed3cb2605b4f41f020bb71330d512513e73803c47ea1721033874502206a019: Status 404 returned error can't find the container with id 7ed3cb2605b4f41f020bb71330d512513e73803c47ea1721033874502206a019 Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.969568 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-76gsx"] Dec 03 16:51:53 crc kubenswrapper[5002]: I1203 16:51:53.978137 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b96b-account-create-update-bnk5v"] Dec 03 16:51:53 crc kubenswrapper[5002]: W1203 16:51:53.988431 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd7b9f97_5f8f_4a6c_9151_6ec342c79d14.slice/crio-967b8096fc2278ba393a65b4cb779c5ad319cc5373097cdebdcf2de623678594 WatchSource:0}: Error finding container 967b8096fc2278ba393a65b4cb779c5ad319cc5373097cdebdcf2de623678594: Status 404 returned error can't find the container with id 967b8096fc2278ba393a65b4cb779c5ad319cc5373097cdebdcf2de623678594 Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.203914 5002 generic.go:334] "Generic (PLEG): container finished" podID="b47a6833-493a-4bcf-9287-617fd78f4c40" containerID="3de4372fa6435c63ccfedbb7394ede42734a75e8fd51c6d1fc2423cf3914ea49" exitCode=0 Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.203994 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6vv7m" event={"ID":"b47a6833-493a-4bcf-9287-617fd78f4c40","Type":"ContainerDied","Data":"3de4372fa6435c63ccfedbb7394ede42734a75e8fd51c6d1fc2423cf3914ea49"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.226415 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nnvht" event={"ID":"63afe065-7146-41e8-b5d9-898d9492f88e","Type":"ContainerStarted","Data":"3dbb19429b734d264cde2745df385c89624a3724f50dda228f0605c865d01059"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.238099 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-sync-76gsx" event={"ID":"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14","Type":"ContainerStarted","Data":"967b8096fc2278ba393a65b4cb779c5ad319cc5373097cdebdcf2de623678594"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.240522 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b96b-account-create-update-bnk5v" event={"ID":"684116e4-56a2-4ac4-b802-d342d50db63f","Type":"ContainerStarted","Data":"30f365f5f01a6a398d50ce3f2b107b0bc044411536105b1db1ce50a85d1ddb5a"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.264941 5002 generic.go:334] "Generic (PLEG): container finished" podID="24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" containerID="5e5785af5ff0258b90954215a1483b5a6f295554f2fbe146b89f4c987082a912" exitCode=0 Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.265419 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk-config-6tmmk" event={"ID":"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb","Type":"ContainerDied","Data":"5e5785af5ff0258b90954215a1483b5a6f295554f2fbe146b89f4c987082a912"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.298592 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5ccf-account-create-update-t42bt" event={"ID":"6dde11f9-1724-4644-bba1-ab2e8efdee35","Type":"ContainerStarted","Data":"791d6ff26ce51c06773571d5e750ad0115fba29c9dd734aac17971ba7ba34775"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.308007 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-hnkdk" Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.331824 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x6tvw" event={"ID":"a0cdc857-0262-4cb5-b4a9-0c28ea317db9","Type":"ContainerStarted","Data":"6b3f0d7d6e788f75ffa87f8e12172fa60b8833661209da6094abe1c76d29ab42"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.348511 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8d7a-account-create-update-dnhkq" event={"ID":"e39fcd9c-ea0f-4426-b6ec-97058995e105","Type":"ContainerStarted","Data":"7ed3cb2605b4f41f020bb71330d512513e73803c47ea1721033874502206a019"} Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.356226 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-5ccf-account-create-update-t42bt" podStartSLOduration=3.356198352 podStartE2EDuration="3.356198352s" podCreationTimestamp="2025-12-03 16:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:54.333653639 +0000 UTC m=+1237.747475527" watchObservedRunningTime="2025-12-03 16:51:54.356198352 +0000 UTC m=+1237.770020240" Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.378773 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-x6tvw" podStartSLOduration=3.378727495 podStartE2EDuration="3.378727495s" podCreationTimestamp="2025-12-03 16:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:54.35758364 +0000 UTC m=+1237.771405528" watchObservedRunningTime="2025-12-03 16:51:54.378727495 +0000 UTC m=+1237.792549383" Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.426473 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-8d7a-account-create-update-dnhkq" podStartSLOduration=3.426440383 podStartE2EDuration="3.426440383s" podCreationTimestamp="2025-12-03 16:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:51:54.419472073 +0000 UTC m=+1237.833293961" watchObservedRunningTime="2025-12-03 16:51:54.426440383 +0000 UTC m=+1237.840262271" Dec 03 16:51:54 crc kubenswrapper[5002]: I1203 16:51:54.858664 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" path="/var/lib/kubelet/pods/60bfa9cd-a2dd-4460-94b8-495a002dbf89/volumes" Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.370421 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x6tvw" event={"ID":"a0cdc857-0262-4cb5-b4a9-0c28ea317db9","Type":"ContainerDied","Data":"0157e700031fbc06f5beb9f7261df49128aab542f40b669d4aa44d1b96123f77"} Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.370278 5002 generic.go:334] "Generic (PLEG): container finished" podID="a0cdc857-0262-4cb5-b4a9-0c28ea317db9" containerID="0157e700031fbc06f5beb9f7261df49128aab542f40b669d4aa44d1b96123f77" exitCode=0 Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.376481 5002 generic.go:334] "Generic (PLEG): container finished" podID="e39fcd9c-ea0f-4426-b6ec-97058995e105" containerID="36d298104f43453d78692e9f0dc49ecd5f2e7e541d036393de895d80b2edf9d1" exitCode=0 Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.376551 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8d7a-account-create-update-dnhkq" event={"ID":"e39fcd9c-ea0f-4426-b6ec-97058995e105","Type":"ContainerDied","Data":"36d298104f43453d78692e9f0dc49ecd5f2e7e541d036393de895d80b2edf9d1"} Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.379033 5002 generic.go:334] "Generic (PLEG): container finished" podID="684116e4-56a2-4ac4-b802-d342d50db63f" containerID="db108b919c7c5b3761fdde4e5ce0a4cbaee1b3800fbe8adf9e1e75b30601967a" exitCode=0 Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.379045 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b96b-account-create-update-bnk5v" event={"ID":"684116e4-56a2-4ac4-b802-d342d50db63f","Type":"ContainerDied","Data":"db108b919c7c5b3761fdde4e5ce0a4cbaee1b3800fbe8adf9e1e75b30601967a"} Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.387094 5002 generic.go:334] "Generic (PLEG): container finished" podID="6dde11f9-1724-4644-bba1-ab2e8efdee35" containerID="791d6ff26ce51c06773571d5e750ad0115fba29c9dd734aac17971ba7ba34775" exitCode=0 Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.387174 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5ccf-account-create-update-t42bt" event={"ID":"6dde11f9-1724-4644-bba1-ab2e8efdee35","Type":"ContainerDied","Data":"791d6ff26ce51c06773571d5e750ad0115fba29c9dd734aac17971ba7ba34775"} Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.391069 5002 generic.go:334] "Generic (PLEG): container finished" podID="63afe065-7146-41e8-b5d9-898d9492f88e" containerID="3dbb19429b734d264cde2745df385c89624a3724f50dda228f0605c865d01059" exitCode=0 Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.391209 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nnvht" 
event={"ID":"63afe065-7146-41e8-b5d9-898d9492f88e","Type":"ContainerDied","Data":"3dbb19429b734d264cde2745df385c89624a3724f50dda228f0605c865d01059"} Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.879290 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.947348 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b47a6833-493a-4bcf-9287-617fd78f4c40-operator-scripts\") pod \"b47a6833-493a-4bcf-9287-617fd78f4c40\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.947706 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kffzf\" (UniqueName: \"kubernetes.io/projected/b47a6833-493a-4bcf-9287-617fd78f4c40-kube-api-access-kffzf\") pod \"b47a6833-493a-4bcf-9287-617fd78f4c40\" (UID: \"b47a6833-493a-4bcf-9287-617fd78f4c40\") " Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.948425 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b47a6833-493a-4bcf-9287-617fd78f4c40-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b47a6833-493a-4bcf-9287-617fd78f4c40" (UID: "b47a6833-493a-4bcf-9287-617fd78f4c40"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:55 crc kubenswrapper[5002]: I1203 16:51:55.955624 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b47a6833-493a-4bcf-9287-617fd78f4c40-kube-api-access-kffzf" (OuterVolumeSpecName: "kube-api-access-kffzf") pod "b47a6833-493a-4bcf-9287-617fd78f4c40" (UID: "b47a6833-493a-4bcf-9287-617fd78f4c40"). InnerVolumeSpecName "kube-api-access-kffzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.014070 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.049326 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fd26\" (UniqueName: \"kubernetes.io/projected/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-kube-api-access-4fd26\") pod \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.049531 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-scripts\") pod \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.049555 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run-ovn\") pod \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.049681 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-additional-scripts\") pod \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.049733 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run\") pod \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.049815 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-log-ovn\") pod \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\" (UID: \"24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb\") " Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.050257 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kffzf\" (UniqueName: \"kubernetes.io/projected/b47a6833-493a-4bcf-9287-617fd78f4c40-kube-api-access-kffzf\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.050273 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b47a6833-493a-4bcf-9287-617fd78f4c40-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.050336 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" (UID: "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.050376 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run" (OuterVolumeSpecName: "var-run") pod "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" (UID: "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.051088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" (UID: "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.051150 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" (UID: "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.051304 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-scripts" (OuterVolumeSpecName: "scripts") pod "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" (UID: "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.062933 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-kube-api-access-4fd26" (OuterVolumeSpecName: "kube-api-access-4fd26") pod "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" (UID: "24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb"). InnerVolumeSpecName "kube-api-access-4fd26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.155120 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.155162 5002 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.155174 5002 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.155185 5002 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.155197 5002 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.155205 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fd26\" (UniqueName: \"kubernetes.io/projected/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb-kube-api-access-4fd26\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.249368 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ovn-controller-hnkdk-config-6tmmk"] Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.257428 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hnkdk-config-6tmmk"] Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.404300 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a7b92225076e15d4d34e07ec5742912b4d12e881a857e218f5b5ec594f8204c" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.404813 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hnkdk-config-6tmmk" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.417533 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6vv7m" event={"ID":"b47a6833-493a-4bcf-9287-617fd78f4c40","Type":"ContainerDied","Data":"f7967f1ce2f517516f200a6a8ac856b10270c44a0d0017f23ecc90b51eb4d9de"} Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.417636 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7967f1ce2f517516f200a6a8ac856b10270c44a0d0017f23ecc90b51eb4d9de" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.417799 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6vv7m" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.868174 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" path="/var/lib/kubelet/pods/24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb/volumes" Dec 03 16:51:56 crc kubenswrapper[5002]: I1203 16:51:56.977605 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.091842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63afe065-7146-41e8-b5d9-898d9492f88e-operator-scripts\") pod \"63afe065-7146-41e8-b5d9-898d9492f88e\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.092480 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fr4z\" (UniqueName: \"kubernetes.io/projected/63afe065-7146-41e8-b5d9-898d9492f88e-kube-api-access-5fr4z\") pod \"63afe065-7146-41e8-b5d9-898d9492f88e\" (UID: \"63afe065-7146-41e8-b5d9-898d9492f88e\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.093315 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63afe065-7146-41e8-b5d9-898d9492f88e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63afe065-7146-41e8-b5d9-898d9492f88e" (UID: "63afe065-7146-41e8-b5d9-898d9492f88e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.093882 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63afe065-7146-41e8-b5d9-898d9492f88e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.100308 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63afe065-7146-41e8-b5d9-898d9492f88e-kube-api-access-5fr4z" (OuterVolumeSpecName: "kube-api-access-5fr4z") pod "63afe065-7146-41e8-b5d9-898d9492f88e" (UID: "63afe065-7146-41e8-b5d9-898d9492f88e"). InnerVolumeSpecName "kube-api-access-5fr4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.185891 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.199159 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fr4z\" (UniqueName: \"kubernetes.io/projected/63afe065-7146-41e8-b5d9-898d9492f88e-kube-api-access-5fr4z\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.199392 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.216194 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.221811 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302435 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39fcd9c-ea0f-4426-b6ec-97058995e105-operator-scripts\") pod \"e39fcd9c-ea0f-4426-b6ec-97058995e105\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302596 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8jcx\" (UniqueName: \"kubernetes.io/projected/e39fcd9c-ea0f-4426-b6ec-97058995e105-kube-api-access-l8jcx\") pod \"e39fcd9c-ea0f-4426-b6ec-97058995e105\" (UID: \"e39fcd9c-ea0f-4426-b6ec-97058995e105\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302634 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-operator-scripts\") pod \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302707 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skcpg\" (UniqueName: \"kubernetes.io/projected/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-kube-api-access-skcpg\") pod \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\" (UID: \"a0cdc857-0262-4cb5-b4a9-0c28ea317db9\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302859 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz27w\" (UniqueName: \"kubernetes.io/projected/684116e4-56a2-4ac4-b802-d342d50db63f-kube-api-access-jz27w\") pod \"684116e4-56a2-4ac4-b802-d342d50db63f\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302924 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/684116e4-56a2-4ac4-b802-d342d50db63f-operator-scripts\") pod \"684116e4-56a2-4ac4-b802-d342d50db63f\" (UID: \"684116e4-56a2-4ac4-b802-d342d50db63f\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.302986 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g9tr\" (UniqueName: \"kubernetes.io/projected/6dde11f9-1724-4644-bba1-ab2e8efdee35-kube-api-access-5g9tr\") pod \"6dde11f9-1724-4644-bba1-ab2e8efdee35\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.303141 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6dde11f9-1724-4644-bba1-ab2e8efdee35-operator-scripts\") pod \"6dde11f9-1724-4644-bba1-ab2e8efdee35\" (UID: \"6dde11f9-1724-4644-bba1-ab2e8efdee35\") " Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.304990 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dde11f9-1724-4644-bba1-ab2e8efdee35-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6dde11f9-1724-4644-bba1-ab2e8efdee35" (UID: "6dde11f9-1724-4644-bba1-ab2e8efdee35"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.305679 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e39fcd9c-ea0f-4426-b6ec-97058995e105-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e39fcd9c-ea0f-4426-b6ec-97058995e105" (UID: "e39fcd9c-ea0f-4426-b6ec-97058995e105"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.310894 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/684116e4-56a2-4ac4-b802-d342d50db63f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "684116e4-56a2-4ac4-b802-d342d50db63f" (UID: "684116e4-56a2-4ac4-b802-d342d50db63f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.314272 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/684116e4-56a2-4ac4-b802-d342d50db63f-kube-api-access-jz27w" (OuterVolumeSpecName: "kube-api-access-jz27w") pod "684116e4-56a2-4ac4-b802-d342d50db63f" (UID: "684116e4-56a2-4ac4-b802-d342d50db63f"). InnerVolumeSpecName "kube-api-access-jz27w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.315497 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a0cdc857-0262-4cb5-b4a9-0c28ea317db9" (UID: "a0cdc857-0262-4cb5-b4a9-0c28ea317db9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.317059 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dde11f9-1724-4644-bba1-ab2e8efdee35-kube-api-access-5g9tr" (OuterVolumeSpecName: "kube-api-access-5g9tr") pod "6dde11f9-1724-4644-bba1-ab2e8efdee35" (UID: "6dde11f9-1724-4644-bba1-ab2e8efdee35"). InnerVolumeSpecName "kube-api-access-5g9tr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.317109 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-kube-api-access-skcpg" (OuterVolumeSpecName: "kube-api-access-skcpg") pod "a0cdc857-0262-4cb5-b4a9-0c28ea317db9" (UID: "a0cdc857-0262-4cb5-b4a9-0c28ea317db9"). InnerVolumeSpecName "kube-api-access-skcpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.320303 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e39fcd9c-ea0f-4426-b6ec-97058995e105-kube-api-access-l8jcx" (OuterVolumeSpecName: "kube-api-access-l8jcx") pod "e39fcd9c-ea0f-4426-b6ec-97058995e105" (UID: "e39fcd9c-ea0f-4426-b6ec-97058995e105"). InnerVolumeSpecName "kube-api-access-l8jcx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406524 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6dde11f9-1724-4644-bba1-ab2e8efdee35-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406572 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39fcd9c-ea0f-4426-b6ec-97058995e105-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406583 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8jcx\" (UniqueName: \"kubernetes.io/projected/e39fcd9c-ea0f-4426-b6ec-97058995e105-kube-api-access-l8jcx\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406596 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406606 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skcpg\" (UniqueName: \"kubernetes.io/projected/a0cdc857-0262-4cb5-b4a9-0c28ea317db9-kube-api-access-skcpg\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406615 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz27w\" (UniqueName: \"kubernetes.io/projected/684116e4-56a2-4ac4-b802-d342d50db63f-kube-api-access-jz27w\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406623 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/684116e4-56a2-4ac4-b802-d342d50db63f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.406632 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g9tr\" (UniqueName: \"kubernetes.io/projected/6dde11f9-1724-4644-bba1-ab2e8efdee35-kube-api-access-5g9tr\") on node \"crc\" DevicePath \"\"" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.441833 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b96b-account-create-update-bnk5v" event={"ID":"684116e4-56a2-4ac4-b802-d342d50db63f","Type":"ContainerDied","Data":"30f365f5f01a6a398d50ce3f2b107b0bc044411536105b1db1ce50a85d1ddb5a"} Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.441879 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30f365f5f01a6a398d50ce3f2b107b0bc044411536105b1db1ce50a85d1ddb5a" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.441962 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b96b-account-create-update-bnk5v" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.449057 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5ccf-account-create-update-t42bt" event={"ID":"6dde11f9-1724-4644-bba1-ab2e8efdee35","Type":"ContainerDied","Data":"9ac7cec535c75b5c32eada4ab5cbe7ce450a81e79687eeaae863a2a90ed9b1ef"} Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.449145 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5ccf-account-create-update-t42bt" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.449320 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ac7cec535c75b5c32eada4ab5cbe7ce450a81e79687eeaae863a2a90ed9b1ef" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.452716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nnvht" event={"ID":"63afe065-7146-41e8-b5d9-898d9492f88e","Type":"ContainerDied","Data":"00ac687bed7ef74a015c208005c87eae6d29b96f17d548863db8f6560319ef92"} Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.452852 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00ac687bed7ef74a015c208005c87eae6d29b96f17d548863db8f6560319ef92" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.453105 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nnvht" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.461608 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x6tvw" event={"ID":"a0cdc857-0262-4cb5-b4a9-0c28ea317db9","Type":"ContainerDied","Data":"6b3f0d7d6e788f75ffa87f8e12172fa60b8833661209da6094abe1c76d29ab42"} Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.461653 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b3f0d7d6e788f75ffa87f8e12172fa60b8833661209da6094abe1c76d29ab42" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.461772 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x6tvw" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.479188 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8d7a-account-create-update-dnhkq" event={"ID":"e39fcd9c-ea0f-4426-b6ec-97058995e105","Type":"ContainerDied","Data":"7ed3cb2605b4f41f020bb71330d512513e73803c47ea1721033874502206a019"} Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.479264 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ed3cb2605b4f41f020bb71330d512513e73803c47ea1721033874502206a019" Dec 03 16:51:57 crc kubenswrapper[5002]: I1203 16:51:57.479388 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8d7a-account-create-update-dnhkq" Dec 03 16:52:03 crc kubenswrapper[5002]: I1203 16:52:03.556112 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-76gsx" event={"ID":"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14","Type":"ContainerStarted","Data":"7b6d2d9ab3498afb3255849aed275053ff6ce7a6636c6b898dbde191016b281f"} Dec 03 16:52:05 crc kubenswrapper[5002]: I1203 16:52:05.878389 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-76gsx" podStartSLOduration=6.10770861 podStartE2EDuration="14.878352174s" podCreationTimestamp="2025-12-03 16:51:51 +0000 UTC" firstStartedPulling="2025-12-03 16:51:53.994665368 +0000 UTC m=+1237.408487256" lastFinishedPulling="2025-12-03 16:52:02.765308902 +0000 UTC m=+1246.179130820" observedRunningTime="2025-12-03 16:52:03.579248313 +0000 UTC m=+1246.993070191" watchObservedRunningTime="2025-12-03 16:52:05.878352174 +0000 UTC m=+1249.292174112" Dec 03 16:52:06 crc kubenswrapper[5002]: I1203 16:52:06.597119 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" containerID="7b6d2d9ab3498afb3255849aed275053ff6ce7a6636c6b898dbde191016b281f" exitCode=0 Dec 03 16:52:06 crc kubenswrapper[5002]: I1203 16:52:06.597768 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-76gsx" event={"ID":"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14","Type":"ContainerDied","Data":"7b6d2d9ab3498afb3255849aed275053ff6ce7a6636c6b898dbde191016b281f"} Dec 03 16:52:07 crc kubenswrapper[5002]: I1203 16:52:07.610330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t6r47" event={"ID":"cf3779ea-e418-4c90-9c5e-74e0c8590c75","Type":"ContainerStarted","Data":"5923d9aee115cda041cea2387cd5646dd110c943ef7d4fb8ab87b7b965477538"} Dec 03 16:52:07 crc kubenswrapper[5002]: I1203 16:52:07.641524 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-t6r47" podStartSLOduration=2.385271502 podStartE2EDuration="32.641500637s" podCreationTimestamp="2025-12-03 16:51:35 +0000 UTC" firstStartedPulling="2025-12-03 16:51:36.051675264 +0000 UTC m=+1219.465497152" lastFinishedPulling="2025-12-03 16:52:06.307904389 +0000 UTC m=+1249.721726287" observedRunningTime="2025-12-03 16:52:07.637173829 +0000 UTC m=+1251.050995727" watchObservedRunningTime="2025-12-03 16:52:07.641500637 +0000 UTC m=+1251.055322525" Dec 03 16:52:07 crc kubenswrapper[5002]: I1203 16:52:07.952740 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-76gsx" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.066152 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-config-data\") pod \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.066286 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmbxl\" (UniqueName: \"kubernetes.io/projected/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-kube-api-access-rmbxl\") pod \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.066356 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-combined-ca-bundle\") pod \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\" (UID: \"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14\") " Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.090968 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-kube-api-access-rmbxl" (OuterVolumeSpecName: "kube-api-access-rmbxl") pod "bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" (UID: "bd7b9f97-5f8f-4a6c-9151-6ec342c79d14"). InnerVolumeSpecName "kube-api-access-rmbxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.096199 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" (UID: "bd7b9f97-5f8f-4a6c-9151-6ec342c79d14"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.119775 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-config-data" (OuterVolumeSpecName: "config-data") pod "bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" (UID: "bd7b9f97-5f8f-4a6c-9151-6ec342c79d14"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.168452 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.168510 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmbxl\" (UniqueName: \"kubernetes.io/projected/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-kube-api-access-rmbxl\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.168534 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.635621 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-76gsx" event={"ID":"bd7b9f97-5f8f-4a6c-9151-6ec342c79d14","Type":"ContainerDied","Data":"967b8096fc2278ba393a65b4cb779c5ad319cc5373097cdebdcf2de623678594"} Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.636139 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="967b8096fc2278ba393a65b4cb779c5ad319cc5373097cdebdcf2de623678594" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.635738 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-76gsx" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962005 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5678f567b5-bsnn2"] Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962420 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dde11f9-1724-4644-bba1-ab2e8efdee35" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962437 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dde11f9-1724-4644-bba1-ab2e8efdee35" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962451 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" containerName="keystone-db-sync" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962458 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" containerName="keystone-db-sync" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962467 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63afe065-7146-41e8-b5d9-898d9492f88e" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962474 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="63afe065-7146-41e8-b5d9-898d9492f88e" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962485 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="dnsmasq-dns" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962491 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="dnsmasq-dns" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962501 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" 
containerName="ovn-config" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962507 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" containerName="ovn-config" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962524 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e39fcd9c-ea0f-4426-b6ec-97058995e105" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962530 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e39fcd9c-ea0f-4426-b6ec-97058995e105" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962544 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="init" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962549 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="init" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962565 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0cdc857-0262-4cb5-b4a9-0c28ea317db9" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962571 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0cdc857-0262-4cb5-b4a9-0c28ea317db9" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962586 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684116e4-56a2-4ac4-b802-d342d50db63f" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962592 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="684116e4-56a2-4ac4-b802-d342d50db63f" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: E1203 16:52:08.962607 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47a6833-493a-4bcf-9287-617fd78f4c40" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.962613 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47a6833-493a-4bcf-9287-617fd78f4c40" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.970940 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" containerName="keystone-db-sync" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971029 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b47a6833-493a-4bcf-9287-617fd78f4c40" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971049 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="684116e4-56a2-4ac4-b802-d342d50db63f" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971062 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dde11f9-1724-4644-bba1-ab2e8efdee35" containerName="mariadb-account-create-update" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971085 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="63afe065-7146-41e8-b5d9-898d9492f88e" containerName="mariadb-database-create" Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971099 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0cdc857-0262-4cb5-b4a9-0c28ea317db9" containerName="mariadb-database-create" Dec 03 16:52:08 
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971111 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e39fcd9c-ea0f-4426-b6ec-97058995e105" containerName="mariadb-account-create-update"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971125 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="60bfa9cd-a2dd-4460-94b8-495a002dbf89" containerName="dnsmasq-dns"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.971133 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="24fb22a4-d4b3-45d1-9f02-1fb9b7a2a5eb" containerName="ovn-config"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.972461 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.983958 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-swift-storage-0\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.984056 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-config\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.984080 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-svc\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.984097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-sb\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.984148 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdjlv\" (UniqueName: \"kubernetes.io/projected/0816339b-4b76-4faa-aba4-63915e148d46-kube-api-access-qdjlv\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:08 crc kubenswrapper[5002]: I1203 16:52:08.984171 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-nb\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.011952 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5678f567b5-bsnn2"]
Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.026268 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-rp6dp"]
Dec 03
16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.027541 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.030808 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.034711 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6l4ll" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.047240 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.047399 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.047828 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.090578 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-swift-storage-0\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.091249 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd8zv\" (UniqueName: \"kubernetes.io/projected/ca936d36-7ef9-4262-bb55-429e789069e0-kube-api-access-xd8zv\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.091344 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-config\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.091437 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-svc\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.091519 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-sb\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.091598 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-combined-ca-bundle\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.091931 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-scripts\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.092038 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-config-data\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.092178 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-fernet-keys\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.092287 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdjlv\" (UniqueName: \"kubernetes.io/projected/0816339b-4b76-4faa-aba4-63915e148d46-kube-api-access-qdjlv\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.092397 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-credential-keys\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.092500 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-nb\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.094620 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-swift-storage-0\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.095172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-config\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.095677 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-svc\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.096212 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-sb\") pod 
\"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.098570 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-rp6dp"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.105916 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-nb\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.136968 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdjlv\" (UniqueName: \"kubernetes.io/projected/0816339b-4b76-4faa-aba4-63915e148d46-kube-api-access-qdjlv\") pod \"dnsmasq-dns-5678f567b5-bsnn2\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.195452 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-combined-ca-bundle\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.195527 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-scripts\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.195762 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-config-data\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.195793 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-fernet-keys\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.195813 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-credential-keys\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.195879 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd8zv\" (UniqueName: \"kubernetes.io/projected/ca936d36-7ef9-4262-bb55-429e789069e0-kube-api-access-xd8zv\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.213493 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-scripts\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.225107 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-credential-keys\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.227175 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-config-data\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.233690 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-combined-ca-bundle\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.259910 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-fernet-keys\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.298664 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.308450 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd8zv\" (UniqueName: \"kubernetes.io/projected/ca936d36-7ef9-4262-bb55-429e789069e0-kube-api-access-xd8zv\") pod \"keystone-bootstrap-rp6dp\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") " pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.340199 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-7wlqt"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.341457 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.352286 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.363171 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-2x66z" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.364723 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.374413 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-rp6dp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.389843 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7wlqt"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.417831 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-f6jrb"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.438463 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-f6jrb"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.438621 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.447534 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.463147 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-985t8" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.463431 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.475847 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.490192 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-pk54x"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.491010 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.491511 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503099 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-combined-ca-bundle\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503175 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-config\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503217 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx87q\" (UniqueName: \"kubernetes.io/projected/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-kube-api-access-qx87q\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503248 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-run-httpd\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503277 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-config-data\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503294 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-log-httpd\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503332 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-db-sync-config-data\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503417 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk6nb\" (UniqueName: \"kubernetes.io/projected/4cfaf87a-9c55-42fa-8083-490f6c936d04-kube-api-access-dk6nb\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 
crc kubenswrapper[5002]: I1203 16:52:09.503442 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-scripts\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503466 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd4zb\" (UniqueName: \"kubernetes.io/projected/99fb9387-8126-480b-a909-f826c3ac626d-kube-api-access-pd4zb\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503498 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-combined-ca-bundle\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503522 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-config-data\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503546 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-db-sync-config-data\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503570 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-combined-ca-bundle\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503594 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-etc-machine-id\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503630 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcfh5\" (UniqueName: \"kubernetes.io/projected/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-kube-api-access-jcfh5\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503650 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-scripts\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.503668 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.505562 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.505784 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-sjpzf" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.520457 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.521199 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.566153 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609339 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609438 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk6nb\" (UniqueName: \"kubernetes.io/projected/4cfaf87a-9c55-42fa-8083-490f6c936d04-kube-api-access-dk6nb\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609462 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-scripts\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609483 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd4zb\" (UniqueName: \"kubernetes.io/projected/99fb9387-8126-480b-a909-f826c3ac626d-kube-api-access-pd4zb\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609506 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-combined-ca-bundle\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-config-data\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609555 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-db-sync-config-data\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609571 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-combined-ca-bundle\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609592 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-etc-machine-id\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609631 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcfh5\" (UniqueName: \"kubernetes.io/projected/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-kube-api-access-jcfh5\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609649 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-scripts\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609665 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609700 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-combined-ca-bundle\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-config\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609788 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx87q\" (UniqueName: \"kubernetes.io/projected/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-kube-api-access-qx87q\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609823 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-run-httpd\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 
16:52:09.609855 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-log-httpd\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609875 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-config-data\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.609919 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-db-sync-config-data\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.610123 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-etc-machine-id\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.626856 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-run-httpd\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.627115 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-log-httpd\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.629456 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-scripts\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.632251 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-combined-ca-bundle\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.634725 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-db-sync-config-data\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.637835 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-config-data\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 
crc kubenswrapper[5002]: I1203 16:52:09.638887 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-config-data\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.645522 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.660671 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-pk54x"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.673238 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-db-sync-config-data\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.673284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-combined-ca-bundle\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.674303 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-scripts\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.674549 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-config\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.678911 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.709214 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-combined-ca-bundle\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.733675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx87q\" (UniqueName: \"kubernetes.io/projected/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-kube-api-access-qx87q\") pod \"cinder-db-sync-7wlqt\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.734385 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk6nb\" (UniqueName: 
\"kubernetes.io/projected/4cfaf87a-9c55-42fa-8083-490f6c936d04-kube-api-access-dk6nb\") pod \"neutron-db-sync-f6jrb\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.720106 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcfh5\" (UniqueName: \"kubernetes.io/projected/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-kube-api-access-jcfh5\") pod \"ceilometer-0\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.746360 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd4zb\" (UniqueName: \"kubernetes.io/projected/99fb9387-8126-480b-a909-f826c3ac626d-kube-api-access-pd4zb\") pod \"barbican-db-sync-pk54x\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.750784 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-s42fp"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.754177 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.769948 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.770186 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.770310 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fxkg5" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.787248 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.854945 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5678f567b5-bsnn2"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.893195 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-config-data\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.893256 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpv9m\" (UniqueName: \"kubernetes.io/projected/3dddc7b5-9eb0-495e-b81c-45f085598280-kube-api-access-zpv9m\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.893348 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dddc7b5-9eb0-495e-b81c-45f085598280-logs\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.893601 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-combined-ca-bundle\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.896786 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.900113 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-scripts\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.896981 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.955203 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-s42fp"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.985898 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74cd4f877c-5jjbg"] Dec 03 16:52:09 crc kubenswrapper[5002]: I1203 16:52:09.987927 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.002755 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-scripts\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.018133 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-config-data\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.018186 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpv9m\" (UniqueName: \"kubernetes.io/projected/3dddc7b5-9eb0-495e-b81c-45f085598280-kube-api-access-zpv9m\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.018248 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dddc7b5-9eb0-495e-b81c-45f085598280-logs\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.018453 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-combined-ca-bundle\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.011278 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.011252 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-scripts\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.019684 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dddc7b5-9eb0-495e-b81c-45f085598280-logs\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.025118 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74cd4f877c-5jjbg"] Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.031733 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-config-data\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.046337 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpv9m\" (UniqueName: \"kubernetes.io/projected/3dddc7b5-9eb0-495e-b81c-45f085598280-kube-api-access-zpv9m\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.048279 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-combined-ca-bundle\") pod \"placement-db-sync-s42fp\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.112549 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.121263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rstf\" (UniqueName: \"kubernetes.io/projected/461d51c2-c143-4f94-b3a9-79ccf2c9069b-kube-api-access-9rstf\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.121358 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-sb\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.121395 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-svc\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.121437 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-nb\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.121457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-swift-storage-0\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.121656 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-config\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.242873 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-nb\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.242927 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-swift-storage-0\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.243666 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-config\") pod 
\"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.243802 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rstf\" (UniqueName: \"kubernetes.io/projected/461d51c2-c143-4f94-b3a9-79ccf2c9069b-kube-api-access-9rstf\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.243873 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-sb\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.243919 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-svc\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.244021 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-nb\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.244590 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-config\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.245130 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-svc\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.245604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-swift-storage-0\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.246647 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-sb\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.273489 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rstf\" (UniqueName: \"kubernetes.io/projected/461d51c2-c143-4f94-b3a9-79ccf2c9069b-kube-api-access-9rstf\") pod \"dnsmasq-dns-74cd4f877c-5jjbg\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " 
pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.335927 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-rp6dp"] Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.389858 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.614324 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5678f567b5-bsnn2"] Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.668427 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-f6jrb"] Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.710356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" event={"ID":"0816339b-4b76-4faa-aba4-63915e148d46","Type":"ContainerStarted","Data":"cd2ff33113623072c21dfe348cd031924aba150bb26e40e12a2ed5f0bc5663a7"} Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.724180 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp6dp" event={"ID":"ca936d36-7ef9-4262-bb55-429e789069e0","Type":"ContainerStarted","Data":"c89bfd46663ff88cc6ed27d22b44daefb6a3111b66fbe6b91f28f79e1b7fd31b"} Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.730791 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f6jrb" event={"ID":"4cfaf87a-9c55-42fa-8083-490f6c936d04","Type":"ContainerStarted","Data":"24d23715b2790db396561ff6345f5a1f4a01420fda59237c9f8f5cfa52870e29"} Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.752932 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-rp6dp" podStartSLOduration=2.752905905 podStartE2EDuration="2.752905905s" podCreationTimestamp="2025-12-03 16:52:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:10.751699312 +0000 UTC m=+1254.165521200" watchObservedRunningTime="2025-12-03 16:52:10.752905905 +0000 UTC m=+1254.166727793" Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.915401 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-s42fp"] Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.966438 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:52:10 crc kubenswrapper[5002]: W1203 16:52:10.975325 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod461d51c2_c143_4f94_b3a9_79ccf2c9069b.slice/crio-1ae477ce6e4a686344f8a52538fa690eca562b276a1d68a2483366850270329b WatchSource:0}: Error finding container 1ae477ce6e4a686344f8a52538fa690eca562b276a1d68a2483366850270329b: Status 404 returned error can't find the container with id 1ae477ce6e4a686344f8a52538fa690eca562b276a1d68a2483366850270329b Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.978441 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7wlqt"] Dec 03 16:52:10 crc kubenswrapper[5002]: I1203 16:52:10.996186 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-pk54x"] Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.015087 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74cd4f877c-5jjbg"] Dec 03 16:52:11 
crc kubenswrapper[5002]: I1203 16:52:11.792159 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerStarted","Data":"fbd599ef932d4d9007cbf45e8e388188180d7c7fa8d147a436b163b44d2d9764"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.795051 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pk54x" event={"ID":"99fb9387-8126-480b-a909-f826c3ac626d","Type":"ContainerStarted","Data":"5b5a303ebf9738ce537ec880599578e562f18cd791eb473e80e3e5960ab6fee3"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.801903 5002 generic.go:334] "Generic (PLEG): container finished" podID="0816339b-4b76-4faa-aba4-63915e148d46" containerID="11aab485e2a7c6df5c3a98023c71db5a026a4429613600127a5a82cb4fd4a782" exitCode=0 Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.802158 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" event={"ID":"0816339b-4b76-4faa-aba4-63915e148d46","Type":"ContainerDied","Data":"11aab485e2a7c6df5c3a98023c71db5a026a4429613600127a5a82cb4fd4a782"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.812166 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wlqt" event={"ID":"b6dbbdb3-c51b-4f76-8605-04bcfaa83451","Type":"ContainerStarted","Data":"7c1c6e76f0b9757a020488156dcfe0e0bfc7ff4153a2769516dab275a9e99306"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.819554 5002 generic.go:334] "Generic (PLEG): container finished" podID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerID="6d66278c7001fdc3e7a1f4271e5f3609eddfa667ec9ae2c18bad610335806764" exitCode=0 Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.819670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" event={"ID":"461d51c2-c143-4f94-b3a9-79ccf2c9069b","Type":"ContainerDied","Data":"6d66278c7001fdc3e7a1f4271e5f3609eddfa667ec9ae2c18bad610335806764"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.819725 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" event={"ID":"461d51c2-c143-4f94-b3a9-79ccf2c9069b","Type":"ContainerStarted","Data":"1ae477ce6e4a686344f8a52538fa690eca562b276a1d68a2483366850270329b"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.832233 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp6dp" event={"ID":"ca936d36-7ef9-4262-bb55-429e789069e0","Type":"ContainerStarted","Data":"79224a40a4f46d3fa2c397f5179d0ff885c37571aae8f52f0e396327b7ecab0c"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.860444 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f6jrb" event={"ID":"4cfaf87a-9c55-42fa-8083-490f6c936d04","Type":"ContainerStarted","Data":"f2dec3814f976a46c85e2eba759eb9becb8d98ee7d15d78dde0b17aeaa7f7b6c"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.876832 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s42fp" event={"ID":"3dddc7b5-9eb0-495e-b81c-45f085598280","Type":"ContainerStarted","Data":"f0012bfb43e3c0e965579bb5a1f5dedeb80c5f739981955b2b47f778d6874323"} Dec 03 16:52:11 crc kubenswrapper[5002]: I1203 16:52:11.909761 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-f6jrb" podStartSLOduration=2.909727483 podStartE2EDuration="2.909727483s" podCreationTimestamp="2025-12-03 
16:52:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:11.901651923 +0000 UTC m=+1255.315473811" watchObservedRunningTime="2025-12-03 16:52:11.909727483 +0000 UTC m=+1255.323549371" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.403437 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.498802 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.612688 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-config\") pod \"0816339b-4b76-4faa-aba4-63915e148d46\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.612807 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-sb\") pod \"0816339b-4b76-4faa-aba4-63915e148d46\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.612885 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-nb\") pod \"0816339b-4b76-4faa-aba4-63915e148d46\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.612964 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdjlv\" (UniqueName: \"kubernetes.io/projected/0816339b-4b76-4faa-aba4-63915e148d46-kube-api-access-qdjlv\") pod \"0816339b-4b76-4faa-aba4-63915e148d46\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.613938 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-svc\") pod \"0816339b-4b76-4faa-aba4-63915e148d46\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.613979 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-swift-storage-0\") pod \"0816339b-4b76-4faa-aba4-63915e148d46\" (UID: \"0816339b-4b76-4faa-aba4-63915e148d46\") " Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.624392 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0816339b-4b76-4faa-aba4-63915e148d46-kube-api-access-qdjlv" (OuterVolumeSpecName: "kube-api-access-qdjlv") pod "0816339b-4b76-4faa-aba4-63915e148d46" (UID: "0816339b-4b76-4faa-aba4-63915e148d46"). InnerVolumeSpecName "kube-api-access-qdjlv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.657017 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0816339b-4b76-4faa-aba4-63915e148d46" (UID: "0816339b-4b76-4faa-aba4-63915e148d46"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.658801 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0816339b-4b76-4faa-aba4-63915e148d46" (UID: "0816339b-4b76-4faa-aba4-63915e148d46"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.674399 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0816339b-4b76-4faa-aba4-63915e148d46" (UID: "0816339b-4b76-4faa-aba4-63915e148d46"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.692282 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0816339b-4b76-4faa-aba4-63915e148d46" (UID: "0816339b-4b76-4faa-aba4-63915e148d46"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.696034 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-config" (OuterVolumeSpecName: "config") pod "0816339b-4b76-4faa-aba4-63915e148d46" (UID: "0816339b-4b76-4faa-aba4-63915e148d46"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.716115 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.716155 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.716165 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.716177 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdjlv\" (UniqueName: \"kubernetes.io/projected/0816339b-4b76-4faa-aba4-63915e148d46-kube-api-access-qdjlv\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.716185 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.716193 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0816339b-4b76-4faa-aba4-63915e148d46-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.903587 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2"
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.904142 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5678f567b5-bsnn2" event={"ID":"0816339b-4b76-4faa-aba4-63915e148d46","Type":"ContainerDied","Data":"cd2ff33113623072c21dfe348cd031924aba150bb26e40e12a2ed5f0bc5663a7"}
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.904184 5002 scope.go:117] "RemoveContainer" containerID="11aab485e2a7c6df5c3a98023c71db5a026a4429613600127a5a82cb4fd4a782"
Dec 03 16:52:12 crc kubenswrapper[5002]: I1203 16:52:12.994494 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5678f567b5-bsnn2"]
Dec 03 16:52:13 crc kubenswrapper[5002]: I1203 16:52:13.003982 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5678f567b5-bsnn2"]
Dec 03 16:52:13 crc kubenswrapper[5002]: I1203 16:52:13.932482 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" event={"ID":"461d51c2-c143-4f94-b3a9-79ccf2c9069b","Type":"ContainerStarted","Data":"139830ceef2ce1034750af13d72ff194753939ccc81196bd540e80b978b34eaf"}
Dec 03 16:52:14 crc kubenswrapper[5002]: I1203 16:52:14.852169 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0816339b-4b76-4faa-aba4-63915e148d46" path="/var/lib/kubelet/pods/0816339b-4b76-4faa-aba4-63915e148d46/volumes"
Dec 03 16:52:14 crc kubenswrapper[5002]: I1203 16:52:14.945449 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg"
Dec 03 16:52:14 crc kubenswrapper[5002]: I1203 16:52:14.993014 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" podStartSLOduration=5.992989385 podStartE2EDuration="5.992989385s" podCreationTimestamp="2025-12-03 16:52:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:14.97880251 +0000 UTC m=+1258.392624418" watchObservedRunningTime="2025-12-03 16:52:14.992989385 +0000 UTC m=+1258.406811273"
Dec 03 16:52:16 crc kubenswrapper[5002]: I1203 16:52:16.973715 5002 generic.go:334] "Generic (PLEG): container finished" podID="ca936d36-7ef9-4262-bb55-429e789069e0" containerID="79224a40a4f46d3fa2c397f5179d0ff885c37571aae8f52f0e396327b7ecab0c" exitCode=0
Dec 03 16:52:16 crc kubenswrapper[5002]: I1203 16:52:16.974311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp6dp" event={"ID":"ca936d36-7ef9-4262-bb55-429e789069e0","Type":"ContainerDied","Data":"79224a40a4f46d3fa2c397f5179d0ff885c37571aae8f52f0e396327b7ecab0c"}
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.393066 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg"
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.464252 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-864b648dc7-x77ct"]
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.464733 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns" containerID="cri-o://03b89cb2436ac0c1009dc99d0feaff080ad885db9e3bda35aa9432496182be09" gracePeriod=10
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.916957 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.917537 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.917593 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f"
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.918492 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"80bed6e41a955a593b74ebe3d33480022d2c94cec0b0862556f213fb12fa5abe"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 16:52:20 crc kubenswrapper[5002]: I1203 16:52:20.918561 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://80bed6e41a955a593b74ebe3d33480022d2c94cec0b0862556f213fb12fa5abe" gracePeriod=600
Dec 03 16:52:21 crc kubenswrapper[5002]: I1203 16:52:21.047736 5002 generic.go:334] "Generic (PLEG): container finished" podID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerID="03b89cb2436ac0c1009dc99d0feaff080ad885db9e3bda35aa9432496182be09" exitCode=0
Dec 03 16:52:21 crc kubenswrapper[5002]: I1203 16:52:21.047809 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" event={"ID":"2b410b16-abbb-4b0e-ada3-70dd42a11ec2","Type":"ContainerDied","Data":"03b89cb2436ac0c1009dc99d0feaff080ad885db9e3bda35aa9432496182be09"}
Dec 03 16:52:22 crc kubenswrapper[5002]: I1203 16:52:22.072924 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="80bed6e41a955a593b74ebe3d33480022d2c94cec0b0862556f213fb12fa5abe" exitCode=0
Dec 03 16:52:22 crc kubenswrapper[5002]: I1203 16:52:22.072979 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"80bed6e41a955a593b74ebe3d33480022d2c94cec0b0862556f213fb12fa5abe"}
Dec 03 16:52:22 crc kubenswrapper[5002]: I1203 16:52:22.073023 5002 scope.go:117] "RemoveContainer" containerID="73ac542ac5ae95737fc5bd0085cb65082e08deae3560c2f23506ea5bddf84026"
Dec 03 16:52:22 crc kubenswrapper[5002]: I1203 16:52:22.986176 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rp6dp"
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.085402 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp6dp" event={"ID":"ca936d36-7ef9-4262-bb55-429e789069e0","Type":"ContainerDied","Data":"c89bfd46663ff88cc6ed27d22b44daefb6a3111b66fbe6b91f28f79e1b7fd31b"}
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.085478 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rp6dp"
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.085490 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c89bfd46663ff88cc6ed27d22b44daefb6a3111b66fbe6b91f28f79e1b7fd31b"
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.094780 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-fernet-keys\") pod \"ca936d36-7ef9-4262-bb55-429e789069e0\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") "
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.095125 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-credential-keys\") pod \"ca936d36-7ef9-4262-bb55-429e789069e0\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") "
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.095191 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-combined-ca-bundle\") pod \"ca936d36-7ef9-4262-bb55-429e789069e0\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") "
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.095961 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-config-data\") pod \"ca936d36-7ef9-4262-bb55-429e789069e0\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") "
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.096040 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd8zv\" (UniqueName: \"kubernetes.io/projected/ca936d36-7ef9-4262-bb55-429e789069e0-kube-api-access-xd8zv\") pod \"ca936d36-7ef9-4262-bb55-429e789069e0\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") "
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.096200 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-scripts\") pod \"ca936d36-7ef9-4262-bb55-429e789069e0\" (UID: \"ca936d36-7ef9-4262-bb55-429e789069e0\") "
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.103074 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ca936d36-7ef9-4262-bb55-429e789069e0" (UID: "ca936d36-7ef9-4262-bb55-429e789069e0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.103145 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ca936d36-7ef9-4262-bb55-429e789069e0" (UID: "ca936d36-7ef9-4262-bb55-429e789069e0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.103333 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-scripts" (OuterVolumeSpecName: "scripts") pod "ca936d36-7ef9-4262-bb55-429e789069e0" (UID: "ca936d36-7ef9-4262-bb55-429e789069e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.104915 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca936d36-7ef9-4262-bb55-429e789069e0-kube-api-access-xd8zv" (OuterVolumeSpecName: "kube-api-access-xd8zv") pod "ca936d36-7ef9-4262-bb55-429e789069e0" (UID: "ca936d36-7ef9-4262-bb55-429e789069e0"). InnerVolumeSpecName "kube-api-access-xd8zv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.129095 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca936d36-7ef9-4262-bb55-429e789069e0" (UID: "ca936d36-7ef9-4262-bb55-429e789069e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.136075 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-config-data" (OuterVolumeSpecName: "config-data") pod "ca936d36-7ef9-4262-bb55-429e789069e0" (UID: "ca936d36-7ef9-4262-bb55-429e789069e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.199044 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.199322 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.199699 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.199777 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.199832 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca936d36-7ef9-4262-bb55-429e789069e0-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.199888 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd8zv\" (UniqueName: \"kubernetes.io/projected/ca936d36-7ef9-4262-bb55-429e789069e0-kube-api-access-xd8zv\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:23 crc kubenswrapper[5002]: I1203 16:52:23.474043 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.176274 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-rp6dp"]
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.186554 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-rp6dp"]
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.297869 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-4kfbq"]
Dec 03 16:52:24 crc kubenswrapper[5002]: E1203 16:52:24.299362 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0816339b-4b76-4faa-aba4-63915e148d46" containerName="init"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.299491 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0816339b-4b76-4faa-aba4-63915e148d46" containerName="init"
Dec 03 16:52:24 crc kubenswrapper[5002]: E1203 16:52:24.299594 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca936d36-7ef9-4262-bb55-429e789069e0" containerName="keystone-bootstrap"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.299655 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca936d36-7ef9-4262-bb55-429e789069e0" containerName="keystone-bootstrap"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.300148 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0816339b-4b76-4faa-aba4-63915e148d46" containerName="init"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.300262 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca936d36-7ef9-4262-bb55-429e789069e0" containerName="keystone-bootstrap"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.301979 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.306715 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.307808 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.307920 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6l4ll"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.308085 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.308193 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.308597 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4kfbq"]
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.434373 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-combined-ca-bundle\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.434488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-config-data\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.434616 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-credential-keys\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.434669 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lt9w\" (UniqueName: \"kubernetes.io/projected/b21b644c-09a6-4080-ba90-e2c8eb798535-kube-api-access-6lt9w\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.434724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-scripts\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.434801 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-fernet-keys\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.536045 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-credential-keys\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.536113 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lt9w\" (UniqueName: \"kubernetes.io/projected/b21b644c-09a6-4080-ba90-e2c8eb798535-kube-api-access-6lt9w\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.536137 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-scripts\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.536192 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-fernet-keys\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.536222 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-combined-ca-bundle\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.536263 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-config-data\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.544623 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-scripts\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.545484 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-config-data\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.546307 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-fernet-keys\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.548708 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-combined-ca-bundle\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.554523 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-credential-keys\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.556604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lt9w\" (UniqueName: \"kubernetes.io/projected/b21b644c-09a6-4080-ba90-e2c8eb798535-kube-api-access-6lt9w\") pod \"keystone-bootstrap-4kfbq\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.624292 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4kfbq"
Dec 03 16:52:24 crc kubenswrapper[5002]: I1203 16:52:24.852573 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca936d36-7ef9-4262-bb55-429e789069e0" path="/var/lib/kubelet/pods/ca936d36-7ef9-4262-bb55-429e789069e0/volumes"
Dec 03 16:52:27 crc kubenswrapper[5002]: I1203 16:52:27.138350 5002 generic.go:334] "Generic (PLEG): container finished" podID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" containerID="5923d9aee115cda041cea2387cd5646dd110c943ef7d4fb8ab87b7b965477538" exitCode=0
Dec 03 16:52:27 crc kubenswrapper[5002]: I1203 16:52:27.138406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t6r47" event={"ID":"cf3779ea-e418-4c90-9c5e-74e0c8590c75","Type":"ContainerDied","Data":"5923d9aee115cda041cea2387cd5646dd110c943ef7d4fb8ab87b7b965477538"}
Dec 03 16:52:33 crc kubenswrapper[5002]: I1203 16:52:33.474285 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout"
Dec 03 16:52:34 crc kubenswrapper[5002]: E1203 16:52:34.143834 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:82006b9c64d4c5f80483cda262d960ce6be4813665158ef1a53ea7734bbe431f"
Dec 03 16:52:34 crc kubenswrapper[5002]: E1203 16:52:34.144388 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:82006b9c64d4c5f80483cda262d960ce6be4813665158ef1a53ea7734bbe431f,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pd4zb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-pk54x_openstack(99fb9387-8126-480b-a909-f826c3ac626d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 16:52:34 crc kubenswrapper[5002]: E1203 16:52:34.145902 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-pk54x" podUID="99fb9387-8126-480b-a909-f826c3ac626d"
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.220093 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t6r47" event={"ID":"cf3779ea-e418-4c90-9c5e-74e0c8590c75","Type":"ContainerDied","Data":"aec162f313774e4a42a64e6797278708ced8f698c2aa74be954ad7da7e1ac698"}
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.220491 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aec162f313774e4a42a64e6797278708ced8f698c2aa74be954ad7da7e1ac698"
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.222562 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" event={"ID":"2b410b16-abbb-4b0e-ada3-70dd42a11ec2","Type":"ContainerDied","Data":"907f18e4ee3f9cce1b51760fe79d46508c178db99e8e519f1435e69394685145"}
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.222612 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="907f18e4ee3f9cce1b51760fe79d46508c178db99e8e519f1435e69394685145"
Dec 03 16:52:34 crc kubenswrapper[5002]: E1203 16:52:34.223976 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:82006b9c64d4c5f80483cda262d960ce6be4813665158ef1a53ea7734bbe431f\\\"\"" pod="openstack/barbican-db-sync-pk54x" podUID="99fb9387-8126-480b-a909-f826c3ac626d"
Dec 03 crc kubenswrapper[5002]: I1203 16:52:34.248276 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-t6r47"
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.256313 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864b648dc7-x77ct"
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323458 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-swift-storage-0\") pod \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323594 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-combined-ca-bundle\") pod \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323773 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf2nd\" (UniqueName: \"kubernetes.io/projected/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-kube-api-access-gf2nd\") pod \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323826 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-db-sync-config-data\") pod \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323883 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-svc\") pod \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323953 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqg4f\" (UniqueName: \"kubernetes.io/projected/cf3779ea-e418-4c90-9c5e-74e0c8590c75-kube-api-access-dqg4f\") pod \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.323982 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-config-data\") pod \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\" (UID: \"cf3779ea-e418-4c90-9c5e-74e0c8590c75\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.324799 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-nb\") pod \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.325870 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-config\") pod \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.325919 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-sb\") pod \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\" (UID: \"2b410b16-abbb-4b0e-ada3-70dd42a11ec2\") "
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.336610 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-kube-api-access-gf2nd" (OuterVolumeSpecName: "kube-api-access-gf2nd") pod "2b410b16-abbb-4b0e-ada3-70dd42a11ec2" (UID: "2b410b16-abbb-4b0e-ada3-70dd42a11ec2"). InnerVolumeSpecName "kube-api-access-gf2nd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.343226 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "cf3779ea-e418-4c90-9c5e-74e0c8590c75" (UID: "cf3779ea-e418-4c90-9c5e-74e0c8590c75"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.352723 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf3779ea-e418-4c90-9c5e-74e0c8590c75-kube-api-access-dqg4f" (OuterVolumeSpecName: "kube-api-access-dqg4f") pod "cf3779ea-e418-4c90-9c5e-74e0c8590c75" (UID: "cf3779ea-e418-4c90-9c5e-74e0c8590c75"). InnerVolumeSpecName "kube-api-access-dqg4f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.399012 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf3779ea-e418-4c90-9c5e-74e0c8590c75" (UID: "cf3779ea-e418-4c90-9c5e-74e0c8590c75"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.411350 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-config-data" (OuterVolumeSpecName: "config-data") pod "cf3779ea-e418-4c90-9c5e-74e0c8590c75" (UID: "cf3779ea-e418-4c90-9c5e-74e0c8590c75"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.414648 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2b410b16-abbb-4b0e-ada3-70dd42a11ec2" (UID: "2b410b16-abbb-4b0e-ada3-70dd42a11ec2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.415332 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2b410b16-abbb-4b0e-ada3-70dd42a11ec2" (UID: "2b410b16-abbb-4b0e-ada3-70dd42a11ec2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.417584 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2b410b16-abbb-4b0e-ada3-70dd42a11ec2" (UID: "2b410b16-abbb-4b0e-ada3-70dd42a11ec2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.422722 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2b410b16-abbb-4b0e-ada3-70dd42a11ec2" (UID: "2b410b16-abbb-4b0e-ada3-70dd42a11ec2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427159 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427188 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqg4f\" (UniqueName: \"kubernetes.io/projected/cf3779ea-e418-4c90-9c5e-74e0c8590c75-kube-api-access-dqg4f\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427202 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427210 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427222 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427230 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427239 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427247 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf2nd\" (UniqueName: \"kubernetes.io/projected/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-kube-api-access-gf2nd\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.427256 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cf3779ea-e418-4c90-9c5e-74e0c8590c75-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.429793 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-config" (OuterVolumeSpecName: "config") pod "2b410b16-abbb-4b0e-ada3-70dd42a11ec2" (UID: "2b410b16-abbb-4b0e-ada3-70dd42a11ec2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:52:34 crc kubenswrapper[5002]: I1203 16:52:34.528367 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b410b16-abbb-4b0e-ada3-70dd42a11ec2-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.231232 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-t6r47"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.231293 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864b648dc7-x77ct"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.280531 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-864b648dc7-x77ct"]
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.294408 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-864b648dc7-x77ct"]
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.697948 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74fd8b655f-n9pvr"]
Dec 03 16:52:35 crc kubenswrapper[5002]: E1203 16:52:35.698356 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" containerName="glance-db-sync"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.698371 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" containerName="glance-db-sync"
Dec 03 16:52:35 crc kubenswrapper[5002]: E1203 16:52:35.698389 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="init"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.698395 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="init"
Dec 03 16:52:35 crc kubenswrapper[5002]: E1203 16:52:35.698429 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.698435 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.698618 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" containerName="glance-db-sync"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.698629 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.700658 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.734555 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74fd8b655f-n9pvr"]
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.758877 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv4cp\" (UniqueName: \"kubernetes.io/projected/55dc7d8c-e73d-486d-8adf-d5e770601947-kube-api-access-mv4cp\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.758955 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-sb\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.759009 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-svc\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.759063 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-nb\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.759081 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-swift-storage-0\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.759112 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-config\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.861527 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv4cp\" (UniqueName: \"kubernetes.io/projected/55dc7d8c-e73d-486d-8adf-d5e770601947-kube-api-access-mv4cp\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.861640 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-sb\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.861725 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-svc\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.861844 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-nb\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.861874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-swift-storage-0\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.861946 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-config\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.863177 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-config\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.863374 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-nb\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.863711 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-sb\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.871947 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-swift-storage-0\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.874716 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-svc\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: E1203 16:52:35.894041 5002 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:7a2056615520e272bae43ec3f34e2ba7a92c1d364b8d9106b53bd694619fc9c2"
Dec 03 16:52:35 crc kubenswrapper[5002]: E1203 16:52:35.894228 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:7a2056615520e272bae43ec3f34e2ba7a92c1d364b8d9106b53bd694619fc9c2,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qx87q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-7wlqt_openstack(b6dbbdb3-c51b-4f76-8605-04bcfaa83451): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 16:52:35 crc kubenswrapper[5002]: I1203 16:52:35.894850 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv4cp\" (UniqueName: \"kubernetes.io/projected/55dc7d8c-e73d-486d-8adf-d5e770601947-kube-api-access-mv4cp\") pod \"dnsmasq-dns-74fd8b655f-n9pvr\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:35 crc kubenswrapper[5002]: E1203 16:52:35.895656 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-7wlqt" podUID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.028637 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.213813 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4kfbq"]
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.267854 5002 generic.go:334] "Generic (PLEG): container finished" podID="4cfaf87a-9c55-42fa-8083-490f6c936d04" containerID="f2dec3814f976a46c85e2eba759eb9becb8d98ee7d15d78dde0b17aeaa7f7b6c" exitCode=0
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.268056 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f6jrb" event={"ID":"4cfaf87a-9c55-42fa-8083-490f6c936d04","Type":"ContainerDied","Data":"f2dec3814f976a46c85e2eba759eb9becb8d98ee7d15d78dde0b17aeaa7f7b6c"}
Dec 03 16:52:36 crc kubenswrapper[5002]: E1203 16:52:36.276120 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:7a2056615520e272bae43ec3f34e2ba7a92c1d364b8d9106b53bd694619fc9c2\\\"\"" pod="openstack/cinder-db-sync-7wlqt" podUID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.605827 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74fd8b655f-n9pvr"]
Dec 03 16:52:36 crc kubenswrapper[5002]: W1203 16:52:36.656763 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55dc7d8c_e73d_486d_8adf_d5e770601947.slice/crio-befb76591ca7a474bc2f581bb1b9b3dadf944c8275c3d774437593e759c79162 WatchSource:0}: Error finding container befb76591ca7a474bc2f581bb1b9b3dadf944c8275c3d774437593e759c79162: Status 404 returned error can't find the container with id befb76591ca7a474bc2f581bb1b9b3dadf944c8275c3d774437593e759c79162
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.753888 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.755968 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.759850 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.760364 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.760638 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-njd5b"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.760990 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.880207 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" path="/var/lib/kubelet/pods/2b410b16-abbb-4b0e-ada3-70dd42a11ec2/volumes"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.887731 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-logs\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.887815 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.887887 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.887907 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-config-data\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.887934 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhpxr\" (UniqueName: \"kubernetes.io/projected/9807c4b8-e2fb-41dc-a0be-b3beac16c378-kube-api-access-lhpxr\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.887961 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-scripts\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.888020 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.994920 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.994990 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-logs\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.995025 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.995083 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.995115 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-config-data\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.995137 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhpxr\" (UniqueName: \"kubernetes.io/projected/9807c4b8-e2fb-41dc-a0be-b3beac16c378-kube-api-access-lhpxr\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.995156 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-scripts\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:36 crc kubenswrapper[5002]: I1203 16:52:36.996364 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.000652 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.001519 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-logs\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.004419 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.006065 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.017180 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-scripts\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.017516 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.018270 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.018617 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-config-data\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.087305 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhpxr\" (UniqueName: \"kubernetes.io/projected/9807c4b8-e2fb-41dc-a0be-b3beac16c378-kube-api-access-lhpxr\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.174771 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.215703 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.228991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.230286 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-logs\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.230321 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.230401 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.230449 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.230494 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28qh8\" (UniqueName: \"kubernetes.io/projected/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-kube-api-access-28qh8\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.230560 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.295719 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4kfbq" event={"ID":"b21b644c-09a6-4080-ba90-e2c8eb798535","Type":"ContainerStarted","Data":"f8a184e524d64b9dc097913b2900799e364ba6067295af6ea66de4aa4aaa7284"}
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.295792 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4kfbq" event={"ID":"b21b644c-09a6-4080-ba90-e2c8eb798535","Type":"ContainerStarted","Data":"a7b9ba6975c8ec6a502bbad138ba9f81f930ebecac03e6f8efd0ed13923911d0"}
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.300208 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s42fp" event={"ID":"3dddc7b5-9eb0-495e-b81c-45f085598280","Type":"ContainerStarted","Data":"00852d0e087cce30527deda079a968547452d5d48aaf7f1b080324ccf4304463"}
Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.302390 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0"
event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerStarted","Data":"b519b295f6530017df4a1adca49e3669922b84f7ef33d1d8e8e7a73e8dc47a30"} Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.309675 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"499c319fe6708e676b3e6316237c177a723be8309f300373241f6343d1f6ef57"} Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.313001 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" event={"ID":"55dc7d8c-e73d-486d-8adf-d5e770601947","Type":"ContainerStarted","Data":"befb76591ca7a474bc2f581bb1b9b3dadf944c8275c3d774437593e759c79162"} Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.324129 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-4kfbq" podStartSLOduration=13.324108167 podStartE2EDuration="13.324108167s" podCreationTimestamp="2025-12-03 16:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:37.313225192 +0000 UTC m=+1280.727047080" watchObservedRunningTime="2025-12-03 16:52:37.324108167 +0000 UTC m=+1280.737930055" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.332756 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28qh8\" (UniqueName: \"kubernetes.io/projected/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-kube-api-access-28qh8\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.332836 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.332884 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.332922 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-logs\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.332943 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.332994 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.333023 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.333695 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.333868 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-logs\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.343429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.351464 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-s42fp" podStartSLOduration=5.130548995 podStartE2EDuration="28.351438821s" podCreationTimestamp="2025-12-03 16:52:09 +0000 UTC" firstStartedPulling="2025-12-03 16:52:10.906101932 +0000 UTC m=+1254.319923820" lastFinishedPulling="2025-12-03 16:52:34.126991748 +0000 UTC m=+1277.540813646" observedRunningTime="2025-12-03 16:52:37.336602627 +0000 UTC m=+1280.750424515" watchObservedRunningTime="2025-12-03 16:52:37.351438821 +0000 UTC m=+1280.765260729" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.353466 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.354817 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.362846 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.373046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.382139 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28qh8\" (UniqueName: \"kubernetes.io/projected/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-kube-api-access-28qh8\") pod \"glance-default-internal-api-0\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.392894 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.489118 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.753692 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.845377 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-combined-ca-bundle\") pod \"4cfaf87a-9c55-42fa-8083-490f6c936d04\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.845492 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk6nb\" (UniqueName: \"kubernetes.io/projected/4cfaf87a-9c55-42fa-8083-490f6c936d04-kube-api-access-dk6nb\") pod \"4cfaf87a-9c55-42fa-8083-490f6c936d04\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.845594 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-config\") pod \"4cfaf87a-9c55-42fa-8083-490f6c936d04\" (UID: \"4cfaf87a-9c55-42fa-8083-490f6c936d04\") " Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.876998 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cfaf87a-9c55-42fa-8083-490f6c936d04-kube-api-access-dk6nb" (OuterVolumeSpecName: "kube-api-access-dk6nb") pod "4cfaf87a-9c55-42fa-8083-490f6c936d04" (UID: "4cfaf87a-9c55-42fa-8083-490f6c936d04"). InnerVolumeSpecName "kube-api-access-dk6nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.913078 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-config" (OuterVolumeSpecName: "config") pod "4cfaf87a-9c55-42fa-8083-490f6c936d04" (UID: "4cfaf87a-9c55-42fa-8083-490f6c936d04"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.914188 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cfaf87a-9c55-42fa-8083-490f6c936d04" (UID: "4cfaf87a-9c55-42fa-8083-490f6c936d04"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.948600 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.948644 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk6nb\" (UniqueName: \"kubernetes.io/projected/4cfaf87a-9c55-42fa-8083-490f6c936d04-kube-api-access-dk6nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:37 crc kubenswrapper[5002]: I1203 16:52:37.948657 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4cfaf87a-9c55-42fa-8083-490f6c936d04-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.096445 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.246127 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.338765 5002 generic.go:334] "Generic (PLEG): container finished" podID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerID="717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554" exitCode=0 Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.338938 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" event={"ID":"55dc7d8c-e73d-486d-8adf-d5e770601947","Type":"ContainerDied","Data":"717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554"} Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.344653 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f6jrb" event={"ID":"4cfaf87a-9c55-42fa-8083-490f6c936d04","Type":"ContainerDied","Data":"24d23715b2790db396561ff6345f5a1f4a01420fda59237c9f8f5cfa52870e29"} Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.344693 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24d23715b2790db396561ff6345f5a1f4a01420fda59237c9f8f5cfa52870e29" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.344807 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-f6jrb" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.477792 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-864b648dc7-x77ct" podUID="2b410b16-abbb-4b0e-ada3-70dd42a11ec2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.511088 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74fd8b655f-n9pvr"] Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.544709 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-849ff95dc5-rrgdn"] Dec 03 16:52:38 crc kubenswrapper[5002]: E1203 16:52:38.545225 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cfaf87a-9c55-42fa-8083-490f6c936d04" containerName="neutron-db-sync" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.545247 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cfaf87a-9c55-42fa-8083-490f6c936d04" containerName="neutron-db-sync" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.545456 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cfaf87a-9c55-42fa-8083-490f6c936d04" containerName="neutron-db-sync" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.546481 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.574737 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-849ff95dc5-rrgdn"] Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.667029 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-sb\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.667086 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-swift-storage-0\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.667155 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-config\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.667179 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmgj7\" (UniqueName: \"kubernetes.io/projected/790a58d7-9e20-43fb-a3d4-92d99c595b88-kube-api-access-fmgj7\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.667207 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-svc\") pod 
\"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.667237 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-nb\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.701438 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-74dcd656b8-rkf4g"] Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.704272 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.725419 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.725630 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.725725 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-985t8" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.725848 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.740260 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-74dcd656b8-rkf4g"] Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.770587 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmgj7\" (UniqueName: \"kubernetes.io/projected/790a58d7-9e20-43fb-a3d4-92d99c595b88-kube-api-access-fmgj7\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771252 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-svc\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771361 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-nb\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771475 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-config\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-sb\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: 
\"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771629 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-combined-ca-bundle\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771667 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-swift-storage-0\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771703 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn2r6\" (UniqueName: \"kubernetes.io/projected/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-kube-api-access-rn2r6\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771834 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-ovndb-tls-certs\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771902 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-httpd-config\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.771948 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-config\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.773054 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-config\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.773854 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-svc\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.774389 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-swift-storage-0\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " 
pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.774758 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-sb\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.775255 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-nb\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.818004 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmgj7\" (UniqueName: \"kubernetes.io/projected/790a58d7-9e20-43fb-a3d4-92d99c595b88-kube-api-access-fmgj7\") pod \"dnsmasq-dns-849ff95dc5-rrgdn\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.863829 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.874521 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-combined-ca-bundle\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.874614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn2r6\" (UniqueName: \"kubernetes.io/projected/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-kube-api-access-rn2r6\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.874708 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-ovndb-tls-certs\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.874781 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-httpd-config\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.874879 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-config\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.880222 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-ovndb-tls-certs\") pod 
\"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.890303 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-combined-ca-bundle\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.893322 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-httpd-config\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.894659 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn2r6\" (UniqueName: \"kubernetes.io/projected/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-kube-api-access-rn2r6\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:38 crc kubenswrapper[5002]: I1203 16:52:38.898034 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-config\") pod \"neutron-74dcd656b8-rkf4g\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.041884 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.438316 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f73ed3a3-665f-41fb-bb34-aed1820b1ffb","Type":"ContainerStarted","Data":"6b8b87ed3bc9d91a5cc00106fe0116f7a31e371472bc5fd17174d46f4f0dde19"} Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.575671 5002 generic.go:334] "Generic (PLEG): container finished" podID="3dddc7b5-9eb0-495e-b81c-45f085598280" containerID="00852d0e087cce30527deda079a968547452d5d48aaf7f1b080324ccf4304463" exitCode=0 Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.575832 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s42fp" event={"ID":"3dddc7b5-9eb0-495e-b81c-45f085598280","Type":"ContainerDied","Data":"00852d0e087cce30527deda079a968547452d5d48aaf7f1b080324ccf4304463"} Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.599693 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9807c4b8-e2fb-41dc-a0be-b3beac16c378","Type":"ContainerStarted","Data":"85b909ea65934acf66d5f6edf03ed011b424fd2aa2cabe220fc0ff259c5680ee"} Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.833460 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-849ff95dc5-rrgdn"] Dec 03 16:52:39 crc kubenswrapper[5002]: I1203 16:52:39.983354 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.026882 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-74dcd656b8-rkf4g"] Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.071497 5002 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:40 crc kubenswrapper[5002]: W1203 16:52:40.092931 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac2907af_7d49_4ebb_bdbb_7a82ae373d89.slice/crio-a8a99b25d1037355f443a3757774d4f814a214c2673d674411a685828cef092e WatchSource:0}: Error finding container a8a99b25d1037355f443a3757774d4f814a214c2673d674411a685828cef092e: Status 404 returned error can't find the container with id a8a99b25d1037355f443a3757774d4f814a214c2673d674411a685828cef092e Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.619436 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74dcd656b8-rkf4g" event={"ID":"ac2907af-7d49-4ebb-bdbb-7a82ae373d89","Type":"ContainerStarted","Data":"0a0bf0b3d26021ee54bf8dfe431f204384455cf492c689ea5d631b9323f1e58d"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.620077 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74dcd656b8-rkf4g" event={"ID":"ac2907af-7d49-4ebb-bdbb-7a82ae373d89","Type":"ContainerStarted","Data":"a8a99b25d1037355f443a3757774d4f814a214c2673d674411a685828cef092e"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.633957 5002 generic.go:334] "Generic (PLEG): container finished" podID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerID="e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d" exitCode=0 Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.634667 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" event={"ID":"790a58d7-9e20-43fb-a3d4-92d99c595b88","Type":"ContainerDied","Data":"e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.634727 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" event={"ID":"790a58d7-9e20-43fb-a3d4-92d99c595b88","Type":"ContainerStarted","Data":"ce5ed39f8a3e09676fab39bdd8f69459afc3af484bc1d35b973c21e0c07a9794"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.637463 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9807c4b8-e2fb-41dc-a0be-b3beac16c378","Type":"ContainerStarted","Data":"d1ca5d60245795c2cf0d7309442c3e788bb87cc4613056c9cd1b73c7d3ce2fe2"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.646953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f73ed3a3-665f-41fb-bb34-aed1820b1ffb","Type":"ContainerStarted","Data":"b229d9865e63e4f4a8bc5cc9d6ff53318f5154de2b9eb82d0069246d5c37759d"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.680211 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerStarted","Data":"1834356db2d075b52fe3d8d4adaa3f14c7f2436b9c412aa69dc72aeea55fb5f3"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.692686 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerName="dnsmasq-dns" containerID="cri-o://664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9" gracePeriod=10 Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.692824 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" event={"ID":"55dc7d8c-e73d-486d-8adf-d5e770601947","Type":"ContainerStarted","Data":"664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9"} Dec 03 16:52:40 crc kubenswrapper[5002]: I1203 16:52:40.692898 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.101881 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.140631 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" podStartSLOduration=6.140611206 podStartE2EDuration="6.140611206s" podCreationTimestamp="2025-12-03 16:52:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:40.719651105 +0000 UTC m=+1284.133473003" watchObservedRunningTime="2025-12-03 16:52:41.140611206 +0000 UTC m=+1284.554433094" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.168241 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-scripts\") pod \"3dddc7b5-9eb0-495e-b81c-45f085598280\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.168437 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-config-data\") pod \"3dddc7b5-9eb0-495e-b81c-45f085598280\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.168552 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dddc7b5-9eb0-495e-b81c-45f085598280-logs\") pod \"3dddc7b5-9eb0-495e-b81c-45f085598280\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.168652 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpv9m\" (UniqueName: \"kubernetes.io/projected/3dddc7b5-9eb0-495e-b81c-45f085598280-kube-api-access-zpv9m\") pod \"3dddc7b5-9eb0-495e-b81c-45f085598280\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.168770 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-combined-ca-bundle\") pod \"3dddc7b5-9eb0-495e-b81c-45f085598280\" (UID: \"3dddc7b5-9eb0-495e-b81c-45f085598280\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.172385 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dddc7b5-9eb0-495e-b81c-45f085598280-logs" (OuterVolumeSpecName: "logs") pod "3dddc7b5-9eb0-495e-b81c-45f085598280" (UID: "3dddc7b5-9eb0-495e-b81c-45f085598280"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.200164 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-scripts" (OuterVolumeSpecName: "scripts") pod "3dddc7b5-9eb0-495e-b81c-45f085598280" (UID: "3dddc7b5-9eb0-495e-b81c-45f085598280"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.214969 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dddc7b5-9eb0-495e-b81c-45f085598280-kube-api-access-zpv9m" (OuterVolumeSpecName: "kube-api-access-zpv9m") pod "3dddc7b5-9eb0-495e-b81c-45f085598280" (UID: "3dddc7b5-9eb0-495e-b81c-45f085598280"). InnerVolumeSpecName "kube-api-access-zpv9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.273542 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpv9m\" (UniqueName: \"kubernetes.io/projected/3dddc7b5-9eb0-495e-b81c-45f085598280-kube-api-access-zpv9m\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.273605 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.273617 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dddc7b5-9eb0-495e-b81c-45f085598280-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.283732 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3dddc7b5-9eb0-495e-b81c-45f085598280" (UID: "3dddc7b5-9eb0-495e-b81c-45f085598280"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.290827 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-config-data" (OuterVolumeSpecName: "config-data") pod "3dddc7b5-9eb0-495e-b81c-45f085598280" (UID: "3dddc7b5-9eb0-495e-b81c-45f085598280"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.377648 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.378050 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dddc7b5-9eb0-495e-b81c-45f085598280-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.464024 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.586395 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv4cp\" (UniqueName: \"kubernetes.io/projected/55dc7d8c-e73d-486d-8adf-d5e770601947-kube-api-access-mv4cp\") pod \"55dc7d8c-e73d-486d-8adf-d5e770601947\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.586509 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-sb\") pod \"55dc7d8c-e73d-486d-8adf-d5e770601947\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.586592 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-svc\") pod \"55dc7d8c-e73d-486d-8adf-d5e770601947\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.586698 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-nb\") pod \"55dc7d8c-e73d-486d-8adf-d5e770601947\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.586730 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-swift-storage-0\") pod \"55dc7d8c-e73d-486d-8adf-d5e770601947\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.586794 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-config\") pod \"55dc7d8c-e73d-486d-8adf-d5e770601947\" (UID: \"55dc7d8c-e73d-486d-8adf-d5e770601947\") " Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.602548 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55dc7d8c-e73d-486d-8adf-d5e770601947-kube-api-access-mv4cp" (OuterVolumeSpecName: "kube-api-access-mv4cp") pod "55dc7d8c-e73d-486d-8adf-d5e770601947" (UID: "55dc7d8c-e73d-486d-8adf-d5e770601947"). InnerVolumeSpecName "kube-api-access-mv4cp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.644084 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-config" (OuterVolumeSpecName: "config") pod "55dc7d8c-e73d-486d-8adf-d5e770601947" (UID: "55dc7d8c-e73d-486d-8adf-d5e770601947"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.665313 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "55dc7d8c-e73d-486d-8adf-d5e770601947" (UID: "55dc7d8c-e73d-486d-8adf-d5e770601947"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.696931 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv4cp\" (UniqueName: \"kubernetes.io/projected/55dc7d8c-e73d-486d-8adf-d5e770601947-kube-api-access-mv4cp\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.696961 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.697008 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.708638 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "55dc7d8c-e73d-486d-8adf-d5e770601947" (UID: "55dc7d8c-e73d-486d-8adf-d5e770601947"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.736447 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "55dc7d8c-e73d-486d-8adf-d5e770601947" (UID: "55dc7d8c-e73d-486d-8adf-d5e770601947"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.740091 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" event={"ID":"790a58d7-9e20-43fb-a3d4-92d99c595b88","Type":"ContainerStarted","Data":"3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.741220 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.756000 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9807c4b8-e2fb-41dc-a0be-b3beac16c378","Type":"ContainerStarted","Data":"6efb4984e59cbed1d99ccd7cf2b98c27439349353193765c11885f40650c7e7e"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.756177 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-log" containerID="cri-o://d1ca5d60245795c2cf0d7309442c3e788bb87cc4613056c9cd1b73c7d3ce2fe2" gracePeriod=30 Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.756383 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-httpd" containerID="cri-o://6efb4984e59cbed1d99ccd7cf2b98c27439349353193765c11885f40650c7e7e" gracePeriod=30 Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.766299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod 
"55dc7d8c-e73d-486d-8adf-d5e770601947" (UID: "55dc7d8c-e73d-486d-8adf-d5e770601947"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.778518 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6495d47864-tf6dm"] Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779105 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-log" containerID="cri-o://b229d9865e63e4f4a8bc5cc9d6ff53318f5154de2b9eb82d0069246d5c37759d" gracePeriod=30 Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779284 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-httpd" containerID="cri-o://9c48abd3717e9945f429940bf8f9c7479f3b63b99824d809c4a36c397b5e3f88" gracePeriod=30 Dec 03 16:52:41 crc kubenswrapper[5002]: E1203 16:52:41.779467 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerName="dnsmasq-dns" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779487 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerName="dnsmasq-dns" Dec 03 16:52:41 crc kubenswrapper[5002]: E1203 16:52:41.779510 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerName="init" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779519 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerName="init" Dec 03 16:52:41 crc kubenswrapper[5002]: E1203 16:52:41.779540 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dddc7b5-9eb0-495e-b81c-45f085598280" containerName="placement-db-sync" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779546 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dddc7b5-9eb0-495e-b81c-45f085598280" containerName="placement-db-sync" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779714 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerName="dnsmasq-dns" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.779734 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dddc7b5-9eb0-495e-b81c-45f085598280" containerName="placement-db-sync" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.780712 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f73ed3a3-665f-41fb-bb34-aed1820b1ffb","Type":"ContainerStarted","Data":"9c48abd3717e9945f429940bf8f9c7479f3b63b99824d809c4a36c397b5e3f88"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.780824 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.786477 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.788545 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.790672 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" podStartSLOduration=3.790644538 podStartE2EDuration="3.790644538s" podCreationTimestamp="2025-12-03 16:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:41.775647921 +0000 UTC m=+1285.189469799" watchObservedRunningTime="2025-12-03 16:52:41.790644538 +0000 UTC m=+1285.204466426" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.791112 5002 generic.go:334] "Generic (PLEG): container finished" podID="b21b644c-09a6-4080-ba90-e2c8eb798535" containerID="f8a184e524d64b9dc097913b2900799e364ba6067295af6ea66de4aa4aaa7284" exitCode=0 Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.791242 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4kfbq" event={"ID":"b21b644c-09a6-4080-ba90-e2c8eb798535","Type":"ContainerDied","Data":"f8a184e524d64b9dc097913b2900799e364ba6067295af6ea66de4aa4aaa7284"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.798972 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.799003 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.799013 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/55dc7d8c-e73d-486d-8adf-d5e770601947-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.817391 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-s42fp" event={"ID":"3dddc7b5-9eb0-495e-b81c-45f085598280","Type":"ContainerDied","Data":"f0012bfb43e3c0e965579bb5a1f5dedeb80c5f739981955b2b47f778d6874323"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.817459 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0012bfb43e3c0e965579bb5a1f5dedeb80c5f739981955b2b47f778d6874323" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.817414 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-s42fp" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.829059 5002 generic.go:334] "Generic (PLEG): container finished" podID="55dc7d8c-e73d-486d-8adf-d5e770601947" containerID="664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9" exitCode=0 Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.829211 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.829199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" event={"ID":"55dc7d8c-e73d-486d-8adf-d5e770601947","Type":"ContainerDied","Data":"664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.829258 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74fd8b655f-n9pvr" event={"ID":"55dc7d8c-e73d-486d-8adf-d5e770601947","Type":"ContainerDied","Data":"befb76591ca7a474bc2f581bb1b9b3dadf944c8275c3d774437593e759c79162"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.830011 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6495d47864-tf6dm"] Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.831150 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74dcd656b8-rkf4g" event={"ID":"ac2907af-7d49-4ebb-bdbb-7a82ae373d89","Type":"ContainerStarted","Data":"e481f0b1d53253d1fe2c40539ac4388ae516d6d690ccae3394dead0a3c95281c"} Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.831561 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.837571 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.837514704 podStartE2EDuration="6.837514704s" podCreationTimestamp="2025-12-03 16:52:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:41.818453485 +0000 UTC m=+1285.232275373" watchObservedRunningTime="2025-12-03 16:52:41.837514704 +0000 UTC m=+1285.251336592" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.838701 5002 scope.go:117] "RemoveContainer" containerID="664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.898813 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.898735029 podStartE2EDuration="6.898735029s" podCreationTimestamp="2025-12-03 16:52:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:41.892401987 +0000 UTC m=+1285.306223875" watchObservedRunningTime="2025-12-03 16:52:41.898735029 +0000 UTC m=+1285.312556917" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901294 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-scripts\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901391 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wmbn\" (UniqueName: \"kubernetes.io/projected/1c099352-abbe-4c3a-9431-c854e5333420-kube-api-access-8wmbn\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901505 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c099352-abbe-4c3a-9431-c854e5333420-logs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901527 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-config-data\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901546 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-internal-tls-certs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901621 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-combined-ca-bundle\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.901705 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-public-tls-certs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.906282 5002 scope.go:117] "RemoveContainer" containerID="717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554" Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.922868 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74fd8b655f-n9pvr"] Dec 03 16:52:41 crc kubenswrapper[5002]: I1203 16:52:41.926380 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74fd8b655f-n9pvr"] Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003700 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-public-tls-certs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003777 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-scripts\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003822 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wmbn\" (UniqueName: \"kubernetes.io/projected/1c099352-abbe-4c3a-9431-c854e5333420-kube-api-access-8wmbn\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " 
pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003884 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c099352-abbe-4c3a-9431-c854e5333420-logs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003905 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-config-data\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-internal-tls-certs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.003949 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-combined-ca-bundle\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.004529 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c099352-abbe-4c3a-9431-c854e5333420-logs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.008232 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-config-data\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.008880 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-scripts\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.011420 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-combined-ca-bundle\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.016000 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-internal-tls-certs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.018262 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-public-tls-certs\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.025512 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wmbn\" (UniqueName: \"kubernetes.io/projected/1c099352-abbe-4c3a-9431-c854e5333420-kube-api-access-8wmbn\") pod \"placement-6495d47864-tf6dm\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.077455 5002 scope.go:117] "RemoveContainer" containerID="664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9" Dec 03 16:52:42 crc kubenswrapper[5002]: E1203 16:52:42.079342 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9\": container with ID starting with 664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9 not found: ID does not exist" containerID="664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.079377 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9"} err="failed to get container status \"664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9\": rpc error: code = NotFound desc = could not find container \"664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9\": container with ID starting with 664ee18b20de99ce4eedec31dd62c6e7dd9c99a20e2969bc6f0d26ed8a55f3a9 not found: ID does not exist" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.079399 5002 scope.go:117] "RemoveContainer" containerID="717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554" Dec 03 16:52:42 crc kubenswrapper[5002]: E1203 16:52:42.079652 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554\": container with ID starting with 717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554 not found: ID does not exist" containerID="717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.079674 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554"} err="failed to get container status \"717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554\": rpc error: code = NotFound desc = could not find container \"717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554\": container with ID starting with 717524c8d23bd0aa71e96882a8aba0d5a48713adbf99371b22c75be9d067d554 not found: ID does not exist" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.108705 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.447195 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-74dcd656b8-rkf4g" podStartSLOduration=4.447174258 podStartE2EDuration="4.447174258s" podCreationTimestamp="2025-12-03 16:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:41.948506093 +0000 UTC m=+1285.362327981" watchObservedRunningTime="2025-12-03 16:52:42.447174258 +0000 UTC m=+1285.860996146" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.466280 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-68bfc56b4f-vnlr5"] Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.479168 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.487229 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.487438 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.499755 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-68bfc56b4f-vnlr5"] Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616365 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-internal-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616448 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-ovndb-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616499 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-combined-ca-bundle\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616522 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-config\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616546 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-public-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616592 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmqc9\" (UniqueName: \"kubernetes.io/projected/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-kube-api-access-dmqc9\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.616634 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-httpd-config\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718580 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-ovndb-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718680 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-combined-ca-bundle\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-config\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718767 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-public-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718839 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmqc9\" (UniqueName: \"kubernetes.io/projected/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-kube-api-access-dmqc9\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718903 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-httpd-config\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.718929 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-internal-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.728883 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-combined-ca-bundle\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.729804 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-internal-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.737871 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-config\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.738709 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-public-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.739322 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-ovndb-tls-certs\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.756720 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmqc9\" (UniqueName: \"kubernetes.io/projected/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-kube-api-access-dmqc9\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.758383 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-httpd-config\") pod \"neutron-68bfc56b4f-vnlr5\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.842948 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.851767 5002 generic.go:334] "Generic (PLEG): container finished" podID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerID="9c48abd3717e9945f429940bf8f9c7479f3b63b99824d809c4a36c397b5e3f88" exitCode=143 Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.851812 5002 generic.go:334] "Generic (PLEG): container finished" podID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerID="b229d9865e63e4f4a8bc5cc9d6ff53318f5154de2b9eb82d0069246d5c37759d" exitCode=143 Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.858458 5002 generic.go:334] "Generic (PLEG): container finished" podID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerID="6efb4984e59cbed1d99ccd7cf2b98c27439349353193765c11885f40650c7e7e" exitCode=143 Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.858562 5002 generic.go:334] "Generic (PLEG): container finished" podID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerID="d1ca5d60245795c2cf0d7309442c3e788bb87cc4613056c9cd1b73c7d3ce2fe2" exitCode=143 Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.891967 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55dc7d8c-e73d-486d-8adf-d5e770601947" path="/var/lib/kubelet/pods/55dc7d8c-e73d-486d-8adf-d5e770601947/volumes" Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.893289 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f73ed3a3-665f-41fb-bb34-aed1820b1ffb","Type":"ContainerDied","Data":"9c48abd3717e9945f429940bf8f9c7479f3b63b99824d809c4a36c397b5e3f88"} Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.893326 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f73ed3a3-665f-41fb-bb34-aed1820b1ffb","Type":"ContainerDied","Data":"b229d9865e63e4f4a8bc5cc9d6ff53318f5154de2b9eb82d0069246d5c37759d"} Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.893343 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9807c4b8-e2fb-41dc-a0be-b3beac16c378","Type":"ContainerDied","Data":"6efb4984e59cbed1d99ccd7cf2b98c27439349353193765c11885f40650c7e7e"} Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.893360 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9807c4b8-e2fb-41dc-a0be-b3beac16c378","Type":"ContainerDied","Data":"d1ca5d60245795c2cf0d7309442c3e788bb87cc4613056c9cd1b73c7d3ce2fe2"} Dec 03 16:52:42 crc kubenswrapper[5002]: I1203 16:52:42.911021 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6495d47864-tf6dm"] Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.358021 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.448477 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-scripts\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.448552 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.448623 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-logs\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.448646 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-config-data\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.448810 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhpxr\" (UniqueName: \"kubernetes.io/projected/9807c4b8-e2fb-41dc-a0be-b3beac16c378-kube-api-access-lhpxr\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.449018 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-combined-ca-bundle\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.449117 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-httpd-run\") pod \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\" (UID: \"9807c4b8-e2fb-41dc-a0be-b3beac16c378\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.449667 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-logs" (OuterVolumeSpecName: "logs") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.452640 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.453031 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.453069 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9807c4b8-e2fb-41dc-a0be-b3beac16c378-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.461917 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.468341 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-scripts" (OuterVolumeSpecName: "scripts") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.481485 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9807c4b8-e2fb-41dc-a0be-b3beac16c378-kube-api-access-lhpxr" (OuterVolumeSpecName: "kube-api-access-lhpxr") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "kube-api-access-lhpxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.499689 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.522197 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-config-data" (OuterVolumeSpecName: "config-data") pod "9807c4b8-e2fb-41dc-a0be-b3beac16c378" (UID: "9807c4b8-e2fb-41dc-a0be-b3beac16c378"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.555778 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.555837 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.555850 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.555861 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhpxr\" (UniqueName: \"kubernetes.io/projected/9807c4b8-e2fb-41dc-a0be-b3beac16c378-kube-api-access-lhpxr\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.555873 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9807c4b8-e2fb-41dc-a0be-b3beac16c378-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.582263 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.630682 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4kfbq" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.632216 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.659211 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.724423 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-68bfc56b4f-vnlr5"] Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.760981 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-config-data\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761081 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761121 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-combined-ca-bundle\") pod \"b21b644c-09a6-4080-ba90-e2c8eb798535\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761144 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-config-data\") pod \"b21b644c-09a6-4080-ba90-e2c8eb798535\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761187 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-credential-keys\") pod \"b21b644c-09a6-4080-ba90-e2c8eb798535\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761238 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-logs\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761297 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-scripts\") pod \"b21b644c-09a6-4080-ba90-e2c8eb798535\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761319 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lt9w\" (UniqueName: \"kubernetes.io/projected/b21b644c-09a6-4080-ba90-e2c8eb798535-kube-api-access-6lt9w\") pod \"b21b644c-09a6-4080-ba90-e2c8eb798535\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761366 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-httpd-run\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: 
\"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761392 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-fernet-keys\") pod \"b21b644c-09a6-4080-ba90-e2c8eb798535\" (UID: \"b21b644c-09a6-4080-ba90-e2c8eb798535\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761412 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28qh8\" (UniqueName: \"kubernetes.io/projected/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-kube-api-access-28qh8\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761438 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-combined-ca-bundle\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.761469 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-scripts\") pod \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\" (UID: \"f73ed3a3-665f-41fb-bb34-aed1820b1ffb\") " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.762264 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-logs" (OuterVolumeSpecName: "logs") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.762565 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.767767 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21b644c-09a6-4080-ba90-e2c8eb798535-kube-api-access-6lt9w" (OuterVolumeSpecName: "kube-api-access-6lt9w") pod "b21b644c-09a6-4080-ba90-e2c8eb798535" (UID: "b21b644c-09a6-4080-ba90-e2c8eb798535"). InnerVolumeSpecName "kube-api-access-6lt9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.769014 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-scripts" (OuterVolumeSpecName: "scripts") pod "b21b644c-09a6-4080-ba90-e2c8eb798535" (UID: "b21b644c-09a6-4080-ba90-e2c8eb798535"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.770331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-scripts" (OuterVolumeSpecName: "scripts") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.770937 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b21b644c-09a6-4080-ba90-e2c8eb798535" (UID: "b21b644c-09a6-4080-ba90-e2c8eb798535"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.772758 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-kube-api-access-28qh8" (OuterVolumeSpecName: "kube-api-access-28qh8") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "kube-api-access-28qh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.776914 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b21b644c-09a6-4080-ba90-e2c8eb798535" (UID: "b21b644c-09a6-4080-ba90-e2c8eb798535"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.782453 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.802731 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-config-data" (OuterVolumeSpecName: "config-data") pod "b21b644c-09a6-4080-ba90-e2c8eb798535" (UID: "b21b644c-09a6-4080-ba90-e2c8eb798535"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.811948 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b21b644c-09a6-4080-ba90-e2c8eb798535" (UID: "b21b644c-09a6-4080-ba90-e2c8eb798535"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.812563 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.859332 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-config-data" (OuterVolumeSpecName: "config-data") pod "f73ed3a3-665f-41fb-bb34-aed1820b1ffb" (UID: "f73ed3a3-665f-41fb-bb34-aed1820b1ffb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865033 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865077 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865088 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28qh8\" (UniqueName: \"kubernetes.io/projected/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-kube-api-access-28qh8\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865102 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865112 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865123 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865161 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865171 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865180 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865192 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865200 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73ed3a3-665f-41fb-bb34-aed1820b1ffb-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865209 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21b644c-09a6-4080-ba90-e2c8eb798535-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.865218 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lt9w\" (UniqueName: \"kubernetes.io/projected/b21b644c-09a6-4080-ba90-e2c8eb798535-kube-api-access-6lt9w\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.895452 5002 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.895891 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f73ed3a3-665f-41fb-bb34-aed1820b1ffb","Type":"ContainerDied","Data":"6b8b87ed3bc9d91a5cc00106fe0116f7a31e371472bc5fd17174d46f4f0dde19"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.895994 5002 scope.go:117] "RemoveContainer" containerID="9c48abd3717e9945f429940bf8f9c7479f3b63b99824d809c4a36c397b5e3f88" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.898091 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.904542 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4kfbq" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.905689 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4kfbq" event={"ID":"b21b644c-09a6-4080-ba90-e2c8eb798535","Type":"ContainerDied","Data":"a7b9ba6975c8ec6a502bbad138ba9f81f930ebecac03e6f8efd0ed13923911d0"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.905802 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7b9ba6975c8ec6a502bbad138ba9f81f930ebecac03e6f8efd0ed13923911d0" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.914319 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68bfc56b4f-vnlr5" event={"ID":"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c","Type":"ContainerStarted","Data":"d04bfbb86d18ebb7af9ae348fb63aa74d0d5f68ca9be4717d63dcf629b3c5902"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.916402 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6495d47864-tf6dm" event={"ID":"1c099352-abbe-4c3a-9431-c854e5333420","Type":"ContainerStarted","Data":"02d099c7b5a52a32907ff960d5be05cd37532b1c4db658f445b99342fec6b927"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.916434 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6495d47864-tf6dm" event={"ID":"1c099352-abbe-4c3a-9431-c854e5333420","Type":"ContainerStarted","Data":"baf09ec78f1fa62fb965a5e1d80a324a472bde92f7bf1614c38cde3288421bd3"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.916449 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6495d47864-tf6dm" event={"ID":"1c099352-abbe-4c3a-9431-c854e5333420","Type":"ContainerStarted","Data":"8714948cd07a1407e7d4a347cc13d0e2b6ac9298baf5382e7f3426c0692d9d66"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.918611 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.918644 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.925238 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9807c4b8-e2fb-41dc-a0be-b3beac16c378","Type":"ContainerDied","Data":"85b909ea65934acf66d5f6edf03ed011b424fd2aa2cabe220fc0ff259c5680ee"} Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.925333 5002 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.950159 5002 scope.go:117] "RemoveContainer" containerID="b229d9865e63e4f4a8bc5cc9d6ff53318f5154de2b9eb82d0069246d5c37759d" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.952972 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6495d47864-tf6dm" podStartSLOduration=2.952945239 podStartE2EDuration="2.952945239s" podCreationTimestamp="2025-12-03 16:52:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:43.937083617 +0000 UTC m=+1287.350905505" watchObservedRunningTime="2025-12-03 16:52:43.952945239 +0000 UTC m=+1287.366767137" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.968328 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.989906 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:43 crc kubenswrapper[5002]: I1203 16:52:43.999592 5002 scope.go:117] "RemoveContainer" containerID="6efb4984e59cbed1d99ccd7cf2b98c27439349353193765c11885f40650c7e7e" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.026007 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.051706 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.070987 5002 scope.go:117] "RemoveContainer" containerID="d1ca5d60245795c2cf0d7309442c3e788bb87cc4613056c9cd1b73c7d3ce2fe2" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.073041 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.097205 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-78586996b8-nkxdr"] Dec 03 16:52:44 crc kubenswrapper[5002]: E1203 16:52:44.097705 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-httpd" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.097724 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-httpd" Dec 03 16:52:44 crc kubenswrapper[5002]: E1203 16:52:44.097758 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-log" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.097765 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-log" Dec 03 16:52:44 crc kubenswrapper[5002]: E1203 16:52:44.097841 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-log" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.097851 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-log" Dec 03 16:52:44 crc kubenswrapper[5002]: E1203 16:52:44.097865 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-httpd" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.097872 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-httpd" Dec 03 16:52:44 crc kubenswrapper[5002]: E1203 16:52:44.097886 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21b644c-09a6-4080-ba90-e2c8eb798535" containerName="keystone-bootstrap" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.097894 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21b644c-09a6-4080-ba90-e2c8eb798535" containerName="keystone-bootstrap" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.099407 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-log" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.099432 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-log" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.099450 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" containerName="glance-httpd" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.099463 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" containerName="glance-httpd" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.099475 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21b644c-09a6-4080-ba90-e2c8eb798535" containerName="keystone-bootstrap" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.100221 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.106395 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.110025 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.110243 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.110387 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6l4ll" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.110553 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.110707 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.122125 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-78586996b8-nkxdr"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.139131 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.141175 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.146772 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-njd5b" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.147078 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.148641 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.148775 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.150653 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.151915 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.179339 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.179611 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.181318 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.184815 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl2pp\" (UniqueName: \"kubernetes.io/projected/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-kube-api-access-sl2pp\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.185562 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-credential-keys\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.185647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-internal-tls-certs\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.185686 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-public-tls-certs\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.185809 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-scripts\") pod \"keystone-78586996b8-nkxdr\" (UID: 
\"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.186002 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-config-data\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.186056 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-fernet-keys\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.186081 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-combined-ca-bundle\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.214067 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.287753 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.288652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-credential-keys\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.288779 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-internal-tls-certs\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.288957 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-public-tls-certs\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.289269 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.289467 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-scripts\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.289573 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psht7\" (UniqueName: \"kubernetes.io/projected/ae1662ba-738d-4085-9744-8ba6b84a1436-kube-api-access-psht7\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.289985 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-logs\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290091 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290190 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-config-data\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290278 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-fernet-keys\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290361 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-combined-ca-bundle\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290485 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl2pp\" (UniqueName: \"kubernetes.io/projected/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-kube-api-access-sl2pp\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290594 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290712 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.290821 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.311115 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-public-tls-certs\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.311669 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-config-data\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.312139 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-credential-keys\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.315263 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-scripts\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.316512 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-combined-ca-bundle\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.316977 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-internal-tls-certs\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.328520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-fernet-keys\") pod \"keystone-78586996b8-nkxdr\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.339624 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl2pp\" (UniqueName: \"kubernetes.io/projected/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-kube-api-access-sl2pp\") pod \"keystone-78586996b8-nkxdr\" (UID: 
\"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.393617 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.394995 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psht7\" (UniqueName: \"kubernetes.io/projected/ae1662ba-738d-4085-9744-8ba6b84a1436-kube-api-access-psht7\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.395542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-logs\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.396131 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.396259 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.396079 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-logs\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.396851 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mld2\" (UniqueName: \"kubernetes.io/projected/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-kube-api-access-6mld2\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.396940 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397030 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397126 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397216 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397312 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397393 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397512 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397622 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397709 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397826 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-logs\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.397848 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") device mount path 
\"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.398460 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.398694 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.405760 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.409233 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.414379 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psht7\" (UniqueName: \"kubernetes.io/projected/ae1662ba-738d-4085-9744-8ba6b84a1436-kube-api-access-psht7\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.419499 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.442617 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.442981 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.490819 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499443 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499546 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mld2\" (UniqueName: \"kubernetes.io/projected/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-kube-api-access-6mld2\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499587 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499627 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499666 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499734 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.499788 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-logs\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.500793 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Dec 
03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.506035 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.515474 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-logs\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.515967 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.516275 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.518146 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.529480 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.540726 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mld2\" (UniqueName: \"kubernetes.io/projected/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-kube-api-access-6mld2\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.560471 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.777380 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.922364 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9807c4b8-e2fb-41dc-a0be-b3beac16c378" path="/var/lib/kubelet/pods/9807c4b8-e2fb-41dc-a0be-b3beac16c378/volumes" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.923292 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f73ed3a3-665f-41fb-bb34-aed1820b1ffb" path="/var/lib/kubelet/pods/f73ed3a3-665f-41fb-bb34-aed1820b1ffb/volumes" Dec 03 16:52:44 crc kubenswrapper[5002]: I1203 16:52:44.923930 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-78586996b8-nkxdr"] Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.089458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-78586996b8-nkxdr" event={"ID":"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0","Type":"ContainerStarted","Data":"fcac53e53514bac34b47de2b8095708950f6f8629b8f5359d65cae13b538d286"} Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.147606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68bfc56b4f-vnlr5" event={"ID":"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c","Type":"ContainerStarted","Data":"7319e26425e43fd3866431755e6e1859112eae7b40a15901924150e6da469063"} Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.147670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68bfc56b4f-vnlr5" event={"ID":"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c","Type":"ContainerStarted","Data":"483b4040379f35157fa205bb3c7495c4b0ffa2ae303d246e906b2954e53e03f5"} Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.148322 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.188670 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-68bfc56b4f-vnlr5" podStartSLOduration=3.188645252 podStartE2EDuration="3.188645252s" podCreationTimestamp="2025-12-03 16:52:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:45.188242222 +0000 UTC m=+1288.602064100" watchObservedRunningTime="2025-12-03 16:52:45.188645252 +0000 UTC m=+1288.602467140" Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.316819 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:52:45 crc kubenswrapper[5002]: W1203 16:52:45.345926 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae1662ba_738d_4085_9744_8ba6b84a1436.slice/crio-aa00d32052da85c4f5ac0fa140c2b7fce2c1fe765e3e28be3b6ba6b10efa72d6 WatchSource:0}: Error finding container aa00d32052da85c4f5ac0fa140c2b7fce2c1fe765e3e28be3b6ba6b10efa72d6: Status 404 returned error can't find the container with id aa00d32052da85c4f5ac0fa140c2b7fce2c1fe765e3e28be3b6ba6b10efa72d6 Dec 03 16:52:45 crc kubenswrapper[5002]: I1203 16:52:45.658667 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:52:46 crc kubenswrapper[5002]: I1203 16:52:46.176280 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-78586996b8-nkxdr" 
event={"ID":"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0","Type":"ContainerStarted","Data":"8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455"} Dec 03 16:52:46 crc kubenswrapper[5002]: I1203 16:52:46.176682 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:52:46 crc kubenswrapper[5002]: I1203 16:52:46.179135 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1662ba-738d-4085-9744-8ba6b84a1436","Type":"ContainerStarted","Data":"aa00d32052da85c4f5ac0fa140c2b7fce2c1fe765e3e28be3b6ba6b10efa72d6"} Dec 03 16:52:46 crc kubenswrapper[5002]: I1203 16:52:46.881526 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-78586996b8-nkxdr" podStartSLOduration=3.8815029819999998 podStartE2EDuration="3.881502982s" podCreationTimestamp="2025-12-03 16:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:46.240057243 +0000 UTC m=+1289.653879131" watchObservedRunningTime="2025-12-03 16:52:46.881502982 +0000 UTC m=+1290.295324870" Dec 03 16:52:47 crc kubenswrapper[5002]: I1203 16:52:47.193029 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1662ba-738d-4085-9744-8ba6b84a1436","Type":"ContainerStarted","Data":"ca1a99076b561338b712e716e07bc5bdcc6c0f06e1ceb0698bac3baedfccbc16"} Dec 03 16:52:48 crc kubenswrapper[5002]: I1203 16:52:48.866017 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:52:48 crc kubenswrapper[5002]: I1203 16:52:48.940339 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74cd4f877c-5jjbg"] Dec 03 16:52:48 crc kubenswrapper[5002]: I1203 16:52:48.942564 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="dnsmasq-dns" containerID="cri-o://139830ceef2ce1034750af13d72ff194753939ccc81196bd540e80b978b34eaf" gracePeriod=10 Dec 03 16:52:49 crc kubenswrapper[5002]: I1203 16:52:49.221817 5002 generic.go:334] "Generic (PLEG): container finished" podID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerID="139830ceef2ce1034750af13d72ff194753939ccc81196bd540e80b978b34eaf" exitCode=0 Dec 03 16:52:49 crc kubenswrapper[5002]: I1203 16:52:49.221873 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" event={"ID":"461d51c2-c143-4f94-b3a9-79ccf2c9069b","Type":"ContainerDied","Data":"139830ceef2ce1034750af13d72ff194753939ccc81196bd540e80b978b34eaf"} Dec 03 16:52:50 crc kubenswrapper[5002]: I1203 16:52:50.391974 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.142:5353: connect: connection refused" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.250882 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf","Type":"ContainerStarted","Data":"f04253a74213b3a3cbb57bb436b1adc48a5f60345eb4a6fa21fc6adaf32dfe46"} Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.366875 5002 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.488372 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rstf\" (UniqueName: \"kubernetes.io/projected/461d51c2-c143-4f94-b3a9-79ccf2c9069b-kube-api-access-9rstf\") pod \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.488881 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-config\") pod \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.488934 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-swift-storage-0\") pod \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.488978 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-nb\") pod \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.489000 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-svc\") pod \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.489068 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-sb\") pod \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\" (UID: \"461d51c2-c143-4f94-b3a9-79ccf2c9069b\") " Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.500404 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/461d51c2-c143-4f94-b3a9-79ccf2c9069b-kube-api-access-9rstf" (OuterVolumeSpecName: "kube-api-access-9rstf") pod "461d51c2-c143-4f94-b3a9-79ccf2c9069b" (UID: "461d51c2-c143-4f94-b3a9-79ccf2c9069b"). InnerVolumeSpecName "kube-api-access-9rstf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.590995 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rstf\" (UniqueName: \"kubernetes.io/projected/461d51c2-c143-4f94-b3a9-79ccf2c9069b-kube-api-access-9rstf\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.619875 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "461d51c2-c143-4f94-b3a9-79ccf2c9069b" (UID: "461d51c2-c143-4f94-b3a9-79ccf2c9069b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.619892 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "461d51c2-c143-4f94-b3a9-79ccf2c9069b" (UID: "461d51c2-c143-4f94-b3a9-79ccf2c9069b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.698127 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.698160 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.709317 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-config" (OuterVolumeSpecName: "config") pod "461d51c2-c143-4f94-b3a9-79ccf2c9069b" (UID: "461d51c2-c143-4f94-b3a9-79ccf2c9069b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.709455 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "461d51c2-c143-4f94-b3a9-79ccf2c9069b" (UID: "461d51c2-c143-4f94-b3a9-79ccf2c9069b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.716082 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "461d51c2-c143-4f94-b3a9-79ccf2c9069b" (UID: "461d51c2-c143-4f94-b3a9-79ccf2c9069b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.799976 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.800006 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:51 crc kubenswrapper[5002]: I1203 16:52:51.800016 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/461d51c2-c143-4f94-b3a9-79ccf2c9069b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.263338 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf","Type":"ContainerStarted","Data":"7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e"} Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.270347 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerStarted","Data":"be2e5d6e007baa78d12f8547879737ae03d57d45cc4d1b1ffed86002b1832078"} Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.275145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pk54x" event={"ID":"99fb9387-8126-480b-a909-f826c3ac626d","Type":"ContainerStarted","Data":"b49eb05bae1106d0f92766371ad241b971af33c597ee16aa00aa4a7e5ac37b88"} Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.283250 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wlqt" event={"ID":"b6dbbdb3-c51b-4f76-8605-04bcfaa83451","Type":"ContainerStarted","Data":"7f3da2aabf2363fa02a6cad9db5326f4c4f7cfa23643d11c53c73be40df7d4c4"} Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.286603 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" event={"ID":"461d51c2-c143-4f94-b3a9-79ccf2c9069b","Type":"ContainerDied","Data":"1ae477ce6e4a686344f8a52538fa690eca562b276a1d68a2483366850270329b"} Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.286645 5002 scope.go:117] "RemoveContainer" containerID="139830ceef2ce1034750af13d72ff194753939ccc81196bd540e80b978b34eaf" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.287784 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74cd4f877c-5jjbg" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.289319 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1662ba-738d-4085-9744-8ba6b84a1436","Type":"ContainerStarted","Data":"bb4e3d3dd76099fe4e91ffb4bbe623d4107c096ca01fa011f4d0fcecf2ee1e5c"} Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.301506 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-pk54x" podStartSLOduration=3.079033028 podStartE2EDuration="43.301483549s" podCreationTimestamp="2025-12-03 16:52:09 +0000 UTC" firstStartedPulling="2025-12-03 16:52:10.967168503 +0000 UTC m=+1254.380990391" lastFinishedPulling="2025-12-03 16:52:51.189619024 +0000 UTC m=+1294.603440912" observedRunningTime="2025-12-03 16:52:52.296956866 +0000 UTC m=+1295.710778764" watchObservedRunningTime="2025-12-03 16:52:52.301483549 +0000 UTC m=+1295.715305447" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.319116 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-7wlqt" podStartSLOduration=3.086240474 podStartE2EDuration="43.319087908s" podCreationTimestamp="2025-12-03 16:52:09 +0000 UTC" firstStartedPulling="2025-12-03 16:52:10.967126482 +0000 UTC m=+1254.380948360" lastFinishedPulling="2025-12-03 16:52:51.199973906 +0000 UTC m=+1294.613795794" observedRunningTime="2025-12-03 16:52:52.3162105 +0000 UTC m=+1295.730032388" watchObservedRunningTime="2025-12-03 16:52:52.319087908 +0000 UTC m=+1295.732909806" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.345208 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.345173858 podStartE2EDuration="8.345173858s" podCreationTimestamp="2025-12-03 16:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:52.344308695 +0000 UTC m=+1295.758130573" watchObservedRunningTime="2025-12-03 16:52:52.345173858 +0000 UTC m=+1295.758995746" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.361835 5002 scope.go:117] "RemoveContainer" containerID="6d66278c7001fdc3e7a1f4271e5f3609eddfa667ec9ae2c18bad610335806764" Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.371595 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74cd4f877c-5jjbg"] Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.385955 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74cd4f877c-5jjbg"] Dec 03 16:52:52 crc kubenswrapper[5002]: I1203 16:52:52.863522 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" path="/var/lib/kubelet/pods/461d51c2-c143-4f94-b3a9-79ccf2c9069b/volumes" Dec 03 16:52:53 crc kubenswrapper[5002]: I1203 16:52:53.310474 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf","Type":"ContainerStarted","Data":"e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13"} Dec 03 16:52:53 crc kubenswrapper[5002]: I1203 16:52:53.352509 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.352483359 podStartE2EDuration="9.352483359s" podCreationTimestamp="2025-12-03 
16:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:52:53.336218176 +0000 UTC m=+1296.750040064" watchObservedRunningTime="2025-12-03 16:52:53.352483359 +0000 UTC m=+1296.766305247" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.324315 5002 generic.go:334] "Generic (PLEG): container finished" podID="99fb9387-8126-480b-a909-f826c3ac626d" containerID="b49eb05bae1106d0f92766371ad241b971af33c597ee16aa00aa4a7e5ac37b88" exitCode=0 Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.324515 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pk54x" event={"ID":"99fb9387-8126-480b-a909-f826c3ac626d","Type":"ContainerDied","Data":"b49eb05bae1106d0f92766371ad241b971af33c597ee16aa00aa4a7e5ac37b88"} Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.491390 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.491446 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.537167 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.537251 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.779235 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.779305 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.814804 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:54 crc kubenswrapper[5002]: I1203 16:52:54.861110 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:55 crc kubenswrapper[5002]: I1203 16:52:55.335843 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:52:55 crc kubenswrapper[5002]: I1203 16:52:55.335877 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:52:55 crc kubenswrapper[5002]: I1203 16:52:55.336064 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:55 crc kubenswrapper[5002]: I1203 16:52:55.336088 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:52:57 crc kubenswrapper[5002]: I1203 16:52:57.379247 5002 generic.go:334] "Generic (PLEG): container finished" podID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451" containerID="7f3da2aabf2363fa02a6cad9db5326f4c4f7cfa23643d11c53c73be40df7d4c4" exitCode=0 Dec 03 16:52:57 crc kubenswrapper[5002]: I1203 16:52:57.379348 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wlqt" 
event={"ID":"b6dbbdb3-c51b-4f76-8605-04bcfaa83451","Type":"ContainerDied","Data":"7f3da2aabf2363fa02a6cad9db5326f4c4f7cfa23643d11c53c73be40df7d4c4"} Dec 03 16:52:57 crc kubenswrapper[5002]: I1203 16:52:57.547200 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.372618 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.653469 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.780805 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-combined-ca-bundle\") pod \"99fb9387-8126-480b-a909-f826c3ac626d\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.780946 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd4zb\" (UniqueName: \"kubernetes.io/projected/99fb9387-8126-480b-a909-f826c3ac626d-kube-api-access-pd4zb\") pod \"99fb9387-8126-480b-a909-f826c3ac626d\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.781101 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-db-sync-config-data\") pod \"99fb9387-8126-480b-a909-f826c3ac626d\" (UID: \"99fb9387-8126-480b-a909-f826c3ac626d\") " Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.794032 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "99fb9387-8126-480b-a909-f826c3ac626d" (UID: "99fb9387-8126-480b-a909-f826c3ac626d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.794086 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99fb9387-8126-480b-a909-f826c3ac626d-kube-api-access-pd4zb" (OuterVolumeSpecName: "kube-api-access-pd4zb") pod "99fb9387-8126-480b-a909-f826c3ac626d" (UID: "99fb9387-8126-480b-a909-f826c3ac626d"). InnerVolumeSpecName "kube-api-access-pd4zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.813641 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99fb9387-8126-480b-a909-f826c3ac626d" (UID: "99fb9387-8126-480b-a909-f826c3ac626d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.884948 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.884979 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99fb9387-8126-480b-a909-f826c3ac626d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:58 crc kubenswrapper[5002]: I1203 16:52:58.884990 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd4zb\" (UniqueName: \"kubernetes.io/projected/99fb9387-8126-480b-a909-f826c3ac626d-kube-api-access-pd4zb\") on node \"crc\" DevicePath \"\"" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.406071 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pk54x" event={"ID":"99fb9387-8126-480b-a909-f826c3ac626d","Type":"ContainerDied","Data":"5b5a303ebf9738ce537ec880599578e562f18cd791eb473e80e3e5960ab6fee3"} Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.406116 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b5a303ebf9738ce537ec880599578e562f18cd791eb473e80e3e5960ab6fee3" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.406180 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pk54x" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.974999 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5dc7d59f9c-7zhx2"] Dec 03 16:52:59 crc kubenswrapper[5002]: E1203 16:52:59.975813 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="dnsmasq-dns" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.975827 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="dnsmasq-dns" Dec 03 16:52:59 crc kubenswrapper[5002]: E1203 16:52:59.976027 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="init" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.976035 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="init" Dec 03 16:52:59 crc kubenswrapper[5002]: E1203 16:52:59.976050 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99fb9387-8126-480b-a909-f826c3ac626d" containerName="barbican-db-sync" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.976058 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="99fb9387-8126-480b-a909-f826c3ac626d" containerName="barbican-db-sync" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.976261 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="461d51c2-c143-4f94-b3a9-79ccf2c9069b" containerName="dnsmasq-dns" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.976281 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="99fb9387-8126-480b-a909-f826c3ac626d" containerName="barbican-db-sync" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.977301 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.981146 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.981256 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-sjpzf" Dec 03 16:52:59 crc kubenswrapper[5002]: I1203 16:52:59.981393 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.006190 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k92jp\" (UniqueName: \"kubernetes.io/projected/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-kube-api-access-k92jp\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.006316 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data-custom\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.006376 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.006395 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-combined-ca-bundle\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.006455 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-logs\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.010830 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5dc7d59f9c-7zhx2"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.056578 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-598cb58b8b-ww24g"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.062443 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.066805 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.087254 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-598cb58b8b-ww24g"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112110 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-logs\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112239 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k92jp\" (UniqueName: \"kubernetes.io/projected/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-kube-api-access-k92jp\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112302 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-logs\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112380 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-combined-ca-bundle\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112411 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data-custom\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112472 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data-custom\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112534 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz7g2\" (UniqueName: \"kubernetes.io/projected/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-kube-api-access-wz7g2\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112563 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112590 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-combined-ca-bundle\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.112642 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.114069 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-logs\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.127813 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data-custom\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.129468 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-combined-ca-bundle\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.144910 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.155567 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65dd957765-glwnj"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.157261 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.164647 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k92jp\" (UniqueName: \"kubernetes.io/projected/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-kube-api-access-k92jp\") pod \"barbican-worker-5dc7d59f9c-7zhx2\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.168690 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65dd957765-glwnj"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214010 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-logs\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-nb\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214133 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-combined-ca-bundle\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214177 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-config\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214209 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data-custom\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214243 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jwl6\" (UniqueName: \"kubernetes.io/projected/1e513765-f651-47b5-9889-a1255f675a88-kube-api-access-8jwl6\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214266 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-sb\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: 
I1203 16:53:00.214303 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz7g2\" (UniqueName: \"kubernetes.io/projected/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-kube-api-access-wz7g2\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214336 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214356 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-svc\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214387 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-swift-storage-0\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.214863 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-logs\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.225098 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-combined-ca-bundle\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.226809 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.245821 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz7g2\" (UniqueName: \"kubernetes.io/projected/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-kube-api-access-wz7g2\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.246571 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data-custom\") pod \"barbican-keystone-listener-598cb58b8b-ww24g\" 
(UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.274708 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5bfcdd69d4-vcp2c"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.285156 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.288572 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.297267 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5bfcdd69d4-vcp2c"] Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318265 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data-custom\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318326 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-nb\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318375 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-config\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318408 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jwl6\" (UniqueName: \"kubernetes.io/projected/1e513765-f651-47b5-9889-a1255f675a88-kube-api-access-8jwl6\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318426 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-sb\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318463 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318491 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-svc\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 
crc kubenswrapper[5002]: I1203 16:53:00.318525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-swift-storage-0\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318542 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7f3f9a7-aeaf-4732-a499-49a1f253e328-logs\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318577 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvwdp\" (UniqueName: \"kubernetes.io/projected/f7f3f9a7-aeaf-4732-a499-49a1f253e328-kube-api-access-gvwdp\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.318595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-combined-ca-bundle\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.320936 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.322075 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-nb\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.324579 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-config\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.325060 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-svc\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.325084 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-sb\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.325687 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-swift-storage-0\") 
pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.362919 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jwl6\" (UniqueName: \"kubernetes.io/projected/1e513765-f651-47b5-9889-a1255f675a88-kube-api-access-8jwl6\") pod \"dnsmasq-dns-65dd957765-glwnj\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.411002 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.420623 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvwdp\" (UniqueName: \"kubernetes.io/projected/f7f3f9a7-aeaf-4732-a499-49a1f253e328-kube-api-access-gvwdp\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.420671 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-combined-ca-bundle\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.420730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data-custom\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.420954 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.421040 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7f3f9a7-aeaf-4732-a499-49a1f253e328-logs\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.421507 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7f3f9a7-aeaf-4732-a499-49a1f253e328-logs\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.429643 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-combined-ca-bundle\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.429263 5002 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data-custom\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.431888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.442410 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvwdp\" (UniqueName: \"kubernetes.io/projected/f7f3f9a7-aeaf-4732-a499-49a1f253e328-kube-api-access-gvwdp\") pod \"barbican-api-5bfcdd69d4-vcp2c\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.628839 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:00 crc kubenswrapper[5002]: I1203 16:53:00.645415 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.050908 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.136573 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx87q\" (UniqueName: \"kubernetes.io/projected/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-kube-api-access-qx87q\") pod \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.136674 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-config-data\") pod \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.136816 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-combined-ca-bundle\") pod \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.136922 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-etc-machine-id\") pod \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.137015 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-db-sync-config-data\") pod \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.137046 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-scripts\") pod \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\" (UID: \"b6dbbdb3-c51b-4f76-8605-04bcfaa83451\") " Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.138036 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b6dbbdb3-c51b-4f76-8605-04bcfaa83451" (UID: "b6dbbdb3-c51b-4f76-8605-04bcfaa83451"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.143244 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-scripts" (OuterVolumeSpecName: "scripts") pod "b6dbbdb3-c51b-4f76-8605-04bcfaa83451" (UID: "b6dbbdb3-c51b-4f76-8605-04bcfaa83451"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.145174 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-kube-api-access-qx87q" (OuterVolumeSpecName: "kube-api-access-qx87q") pod "b6dbbdb3-c51b-4f76-8605-04bcfaa83451" (UID: "b6dbbdb3-c51b-4f76-8605-04bcfaa83451"). InnerVolumeSpecName "kube-api-access-qx87q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.145730 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b6dbbdb3-c51b-4f76-8605-04bcfaa83451" (UID: "b6dbbdb3-c51b-4f76-8605-04bcfaa83451"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.184522 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6dbbdb3-c51b-4f76-8605-04bcfaa83451" (UID: "b6dbbdb3-c51b-4f76-8605-04bcfaa83451"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.192191 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-config-data" (OuterVolumeSpecName: "config-data") pod "b6dbbdb3-c51b-4f76-8605-04bcfaa83451" (UID: "b6dbbdb3-c51b-4f76-8605-04bcfaa83451"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.239794 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.239845 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.239855 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.239878 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.239908 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx87q\" (UniqueName: \"kubernetes.io/projected/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-kube-api-access-qx87q\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.239921 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6dbbdb3-c51b-4f76-8605-04bcfaa83451-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.433393 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wlqt" event={"ID":"b6dbbdb3-c51b-4f76-8605-04bcfaa83451","Type":"ContainerDied","Data":"7c1c6e76f0b9757a020488156dcfe0e0bfc7ff4153a2769516dab275a9e99306"} Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.433687 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c1c6e76f0b9757a020488156dcfe0e0bfc7ff4153a2769516dab275a9e99306" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.434970 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7wlqt" Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.761148 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65dd957765-glwnj"] Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.775662 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5bfcdd69d4-vcp2c"] Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.885424 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5dc7d59f9c-7zhx2"] Dec 03 16:53:01 crc kubenswrapper[5002]: I1203 16:53:01.908916 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-598cb58b8b-ww24g"] Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.423502 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:02 crc kubenswrapper[5002]: E1203 16:53:02.425232 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451" containerName="cinder-db-sync" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.425256 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451" containerName="cinder-db-sync" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.425451 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451" containerName="cinder-db-sync" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.462409 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.480133 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.481828 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-2x66z" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.482100 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.482346 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.492048 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.501808 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65dd957765-glwnj"] Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.505981 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" event={"ID":"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f","Type":"ContainerStarted","Data":"bb591d2b9b74576585ab2eff0ecda2532a6b4df7a8b8d63f3ba4bd6de57ffa72"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.520859 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" event={"ID":"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9","Type":"ContainerStarted","Data":"67d6bf380cb559ff2e35e537c0675fb8421a705995c5a62094cc9c0c09299a57"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.547312 5002 generic.go:334] "Generic (PLEG): container finished" podID="1e513765-f651-47b5-9889-a1255f675a88" containerID="c091005463c14462ebc8cba1486602c1aaf37dc35621c5d1719b72fef19ae1f0" exitCode=0 
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.547423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65dd957765-glwnj" event={"ID":"1e513765-f651-47b5-9889-a1255f675a88","Type":"ContainerDied","Data":"c091005463c14462ebc8cba1486602c1aaf37dc35621c5d1719b72fef19ae1f0"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.547466 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65dd957765-glwnj" event={"ID":"1e513765-f651-47b5-9889-a1255f675a88","Type":"ContainerStarted","Data":"82759d499047a94038c1b7709873d3cb611e9f034dd38919d1b7e75704ea79c2"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.586981 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.587100 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.587154 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.587190 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-scripts\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.587228 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.587266 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gfmz\" (UniqueName: \"kubernetes.io/projected/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-kube-api-access-7gfmz\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.590323 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" event={"ID":"f7f3f9a7-aeaf-4732-a499-49a1f253e328","Type":"ContainerStarted","Data":"0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.590378 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" 
event={"ID":"f7f3f9a7-aeaf-4732-a499-49a1f253e328","Type":"ContainerStarted","Data":"45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.590389 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" event={"ID":"f7f3f9a7-aeaf-4732-a499-49a1f253e328","Type":"ContainerStarted","Data":"89eb420a4e83f19996a409b9134f0e9a0d981754b97c3c482380238691e79299"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.591584 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.591609 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:02 crc kubenswrapper[5002]: E1203 16:53:02.610349 5002 mount_linux.go:282] Mount failed: exit status 32 Dec 03 16:53:02 crc kubenswrapper[5002]: Mounting command: mount Dec 03 16:53:02 crc kubenswrapper[5002]: Mounting arguments: --no-canonicalize -o bind /proc/5002/fd/21 /var/lib/kubelet/pods/1e513765-f651-47b5-9889-a1255f675a88/volume-subpaths/dns-svc/dnsmasq-dns/1 Dec 03 16:53:02 crc kubenswrapper[5002]: Output: mount: /var/lib/kubelet/pods/1e513765-f651-47b5-9889-a1255f675a88/volume-subpaths/dns-svc/dnsmasq-dns/1: mount(2) system call failed: No such file or directory. Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.618406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerStarted","Data":"17bf36c3ccbe3a52130640f1f4569deec2e2afd7c7240f1c043f3575f5efd52d"} Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.623006 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-central-agent" containerID="cri-o://b519b295f6530017df4a1adca49e3669922b84f7ef33d1d8e8e7a73e8dc47a30" gracePeriod=30 Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.623460 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.623527 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="proxy-httpd" containerID="cri-o://17bf36c3ccbe3a52130640f1f4569deec2e2afd7c7240f1c043f3575f5efd52d" gracePeriod=30 Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.623603 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="sg-core" containerID="cri-o://be2e5d6e007baa78d12f8547879737ae03d57d45cc4d1b1ffed86002b1832078" gracePeriod=30 Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.623684 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-notification-agent" containerID="cri-o://1834356db2d075b52fe3d8d4adaa3f14c7f2436b9c412aa69dc72aeea55fb5f3" gracePeriod=30 Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.636935 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c77d8b67c-bpckg"] Dec 03 16:53:02 crc kubenswrapper[5002]: E1203 16:53:02.654067 5002 kubelet_pods.go:349] "Failed to prepare subPath 
for volumeMount of the container" err=< Dec 03 16:53:02 crc kubenswrapper[5002]: error mounting /var/lib/kubelet/pods/1e513765-f651-47b5-9889-a1255f675a88/volumes/kubernetes.io~configmap/dns-svc/..2025_12_03_16_53_00.3558969358/dns-svc: mount failed: exit status 32 Dec 03 16:53:02 crc kubenswrapper[5002]: Mounting command: mount Dec 03 16:53:02 crc kubenswrapper[5002]: Mounting arguments: --no-canonicalize -o bind /proc/5002/fd/21 /var/lib/kubelet/pods/1e513765-f651-47b5-9889-a1255f675a88/volume-subpaths/dns-svc/dnsmasq-dns/1 Dec 03 16:53:02 crc kubenswrapper[5002]: Output: mount: /var/lib/kubelet/pods/1e513765-f651-47b5-9889-a1255f675a88/volume-subpaths/dns-svc/dnsmasq-dns/1: mount(2) system call failed: No such file or directory. Dec 03 16:53:02 crc kubenswrapper[5002]: > containerName="dnsmasq-dns" volumeMountName="dns-svc" Dec 03 16:53:02 crc kubenswrapper[5002]: E1203 16:53:02.654266 5002 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:42f5663a161307156673f86e5eaad59f842a4bf25824f48008e69ab18e4ba792,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n566h86hd4h5f9hc8h599h5h56bh75h554h597h5f4hb7h98h58fh66ch57ch668h5bfhd8h596h68dh54h8ch674h587h5bdhb9hc4h695h5b8hccq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8jwl6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-65dd957765-glwnj_openstack(1e513765-f651-47b5-9889-a1255f675a88): CreateContainerConfigError: failed to prepare subPath for volumeMount \"dns-svc\" of container \"dnsmasq-dns\"" logger="UnhandledError" Dec 03 16:53:02 crc kubenswrapper[5002]: E1203 16:53:02.656991 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerConfigError: \"failed to prepare subPath for volumeMount \\\"dns-svc\\\" of container \\\"dnsmasq-dns\\\"\"" pod="openstack/dnsmasq-dns-65dd957765-glwnj" podUID="1e513765-f651-47b5-9889-a1255f675a88" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.665452 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" podStartSLOduration=2.6654318740000003 podStartE2EDuration="2.665431874s" podCreationTimestamp="2025-12-03 16:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:02.628166911 +0000 UTC m=+1306.041988799" watchObservedRunningTime="2025-12-03 16:53:02.665431874 +0000 UTC m=+1306.079253762" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.675533 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c77d8b67c-bpckg"] Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.675708 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.687617 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.439032291 podStartE2EDuration="53.687590757s" podCreationTimestamp="2025-12-03 16:52:09 +0000 UTC" firstStartedPulling="2025-12-03 16:52:10.969918988 +0000 UTC m=+1254.383740866" lastFinishedPulling="2025-12-03 16:53:01.218477434 +0000 UTC m=+1304.632299332" observedRunningTime="2025-12-03 16:53:02.672511768 +0000 UTC m=+1306.086333656" watchObservedRunningTime="2025-12-03 16:53:02.687590757 +0000 UTC m=+1306.101412645" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.689381 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.689489 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.689533 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.689585 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-scripts\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.689617 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.689651 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gfmz\" (UniqueName: \"kubernetes.io/projected/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-kube-api-access-7gfmz\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.692836 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.709285 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.715654 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.718243 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.720619 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-scripts\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.720763 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gfmz\" (UniqueName: \"kubernetes.io/projected/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-kube-api-access-7gfmz\") pod \"cinder-scheduler-0\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.726979 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.728896 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.737098 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.741535 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791049 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data-custom\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791088 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-sb\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791123 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791163 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791189 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjjvg\" (UniqueName: \"kubernetes.io/projected/abbcb731-b955-4e74-98e3-1ddb1db21986-kube-api-access-rjjvg\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791218 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791269 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-nb\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791293 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-svc\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc 
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df9c758-f631-4338-8517-249e54d8366f-logs\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791391 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-scripts\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791407 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g8jb\" (UniqueName: \"kubernetes.io/projected/5df9c758-f631-4338-8517-249e54d8366f-kube-api-access-8g8jb\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.791446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5df9c758-f631-4338-8517-249e54d8366f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.825032 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.897949 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-svc\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898013 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-swift-storage-0\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df9c758-f631-4338-8517-249e54d8366f-logs\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898099 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-scripts\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898116 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g8jb\" (UniqueName: \"kubernetes.io/projected/5df9c758-f631-4338-8517-249e54d8366f-kube-api-access-8g8jb\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5df9c758-f631-4338-8517-249e54d8366f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898207 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data-custom\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898223 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-sb\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898257 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0"
Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898301 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg"
\"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898320 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjjvg\" (UniqueName: \"kubernetes.io/projected/abbcb731-b955-4e74-98e3-1ddb1db21986-kube-api-access-rjjvg\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898347 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.898371 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-nb\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.899313 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-nb\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.899868 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-svc\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.900448 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-sb\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.901185 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-swift-storage-0\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.901358 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.901553 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df9c758-f631-4338-8517-249e54d8366f-logs\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " 
pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.901595 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5df9c758-f631-4338-8517-249e54d8366f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.913038 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.913597 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-scripts\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.915451 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data-custom\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.927613 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjjvg\" (UniqueName: \"kubernetes.io/projected/abbcb731-b955-4e74-98e3-1ddb1db21986-kube-api-access-rjjvg\") pod \"dnsmasq-dns-5c77d8b67c-bpckg\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.928025 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g8jb\" (UniqueName: \"kubernetes.io/projected/5df9c758-f631-4338-8517-249e54d8366f-kube-api-access-8g8jb\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:02 crc kubenswrapper[5002]: I1203 16:53:02.933777 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data\") pod \"cinder-api-0\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " pod="openstack/cinder-api-0" Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.109640 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.120475 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.396875 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.638757 5002 generic.go:334] "Generic (PLEG): container finished" podID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerID="17bf36c3ccbe3a52130640f1f4569deec2e2afd7c7240f1c043f3575f5efd52d" exitCode=0 Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.639186 5002 generic.go:334] "Generic (PLEG): container finished" podID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerID="be2e5d6e007baa78d12f8547879737ae03d57d45cc4d1b1ffed86002b1832078" exitCode=2 Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.639199 5002 generic.go:334] "Generic (PLEG): container finished" podID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerID="b519b295f6530017df4a1adca49e3669922b84f7ef33d1d8e8e7a73e8dc47a30" exitCode=0 Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.639249 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerDied","Data":"17bf36c3ccbe3a52130640f1f4569deec2e2afd7c7240f1c043f3575f5efd52d"} Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.639288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerDied","Data":"be2e5d6e007baa78d12f8547879737ae03d57d45cc4d1b1ffed86002b1832078"} Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.639300 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerDied","Data":"b519b295f6530017df4a1adca49e3669922b84f7ef33d1d8e8e7a73e8dc47a30"} Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.641417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1dd80080-ccac-4b1d-b8b1-c8d871dc10df","Type":"ContainerStarted","Data":"7ad55b7b3ec79a7909975988ef1993fbb5fb11f7b521c39615140c0d652edd07"} Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.662574 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c77d8b67c-bpckg"] Dec 03 16:53:03 crc kubenswrapper[5002]: I1203 16:53:03.731648 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.046931 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.137857 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-swift-storage-0\") pod \"1e513765-f651-47b5-9889-a1255f675a88\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.138394 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jwl6\" (UniqueName: \"kubernetes.io/projected/1e513765-f651-47b5-9889-a1255f675a88-kube-api-access-8jwl6\") pod \"1e513765-f651-47b5-9889-a1255f675a88\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.138430 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-sb\") pod \"1e513765-f651-47b5-9889-a1255f675a88\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.138547 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-config\") pod \"1e513765-f651-47b5-9889-a1255f675a88\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.138606 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-nb\") pod \"1e513765-f651-47b5-9889-a1255f675a88\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.138646 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-svc\") pod \"1e513765-f651-47b5-9889-a1255f675a88\" (UID: \"1e513765-f651-47b5-9889-a1255f675a88\") " Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.182740 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e513765-f651-47b5-9889-a1255f675a88" (UID: "1e513765-f651-47b5-9889-a1255f675a88"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.185864 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e513765-f651-47b5-9889-a1255f675a88-kube-api-access-8jwl6" (OuterVolumeSpecName: "kube-api-access-8jwl6") pod "1e513765-f651-47b5-9889-a1255f675a88" (UID: "1e513765-f651-47b5-9889-a1255f675a88"). InnerVolumeSpecName "kube-api-access-8jwl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.206560 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e513765-f651-47b5-9889-a1255f675a88" (UID: "1e513765-f651-47b5-9889-a1255f675a88"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.229370 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e513765-f651-47b5-9889-a1255f675a88" (UID: "1e513765-f651-47b5-9889-a1255f675a88"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.229487 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1e513765-f651-47b5-9889-a1255f675a88" (UID: "1e513765-f651-47b5-9889-a1255f675a88"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.243622 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.243650 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jwl6\" (UniqueName: \"kubernetes.io/projected/1e513765-f651-47b5-9889-a1255f675a88-kube-api-access-8jwl6\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.243693 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.243706 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.243716 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.258565 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-config" (OuterVolumeSpecName: "config") pod "1e513765-f651-47b5-9889-a1255f675a88" (UID: "1e513765-f651-47b5-9889-a1255f675a88"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.346260 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e513765-f651-47b5-9889-a1255f675a88-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.651036 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65dd957765-glwnj" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.653036 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65dd957765-glwnj" event={"ID":"1e513765-f651-47b5-9889-a1255f675a88","Type":"ContainerDied","Data":"82759d499047a94038c1b7709873d3cb611e9f034dd38919d1b7e75704ea79c2"} Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.653141 5002 scope.go:117] "RemoveContainer" containerID="c091005463c14462ebc8cba1486602c1aaf37dc35621c5d1719b72fef19ae1f0" Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.659592 5002 generic.go:334] "Generic (PLEG): container finished" podID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerID="1834356db2d075b52fe3d8d4adaa3f14c7f2436b9c412aa69dc72aeea55fb5f3" exitCode=0 Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.659920 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerDied","Data":"1834356db2d075b52fe3d8d4adaa3f14c7f2436b9c412aa69dc72aeea55fb5f3"} Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.726368 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65dd957765-glwnj"] Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.738429 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65dd957765-glwnj"] Dec 03 16:53:04 crc kubenswrapper[5002]: W1203 16:53:04.800497 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5df9c758_f631_4338_8517_249e54d8366f.slice/crio-0123faaa4dd135450618fcc6e57b4de135d52ccec7471ade5dd3779a0983a377 WatchSource:0}: Error finding container 0123faaa4dd135450618fcc6e57b4de135d52ccec7471ade5dd3779a0983a377: Status 404 returned error can't find the container with id 0123faaa4dd135450618fcc6e57b4de135d52ccec7471ade5dd3779a0983a377 Dec 03 16:53:04 crc kubenswrapper[5002]: W1203 16:53:04.803326 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabbcb731_b955_4e74_98e3_1ddb1db21986.slice/crio-00442de7db9c9528550efc46b221d466e288ca8624e7e91d5c9fb65df8b50b2b WatchSource:0}: Error finding container 00442de7db9c9528550efc46b221d466e288ca8624e7e91d5c9fb65df8b50b2b: Status 404 returned error can't find the container with id 00442de7db9c9528550efc46b221d466e288ca8624e7e91d5c9fb65df8b50b2b Dec 03 16:53:04 crc kubenswrapper[5002]: I1203 16:53:04.858371 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e513765-f651-47b5-9889-a1255f675a88" path="/var/lib/kubelet/pods/1e513765-f651-47b5-9889-a1255f675a88/volumes" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.108910 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.165628 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-run-httpd\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.165729 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-sg-core-conf-yaml\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.165851 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcfh5\" (UniqueName: \"kubernetes.io/projected/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-kube-api-access-jcfh5\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.165872 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-log-httpd\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.165916 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-scripts\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.165965 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-combined-ca-bundle\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.166024 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-config-data\") pod \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\" (UID: \"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada\") " Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.166190 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.166457 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.166607 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.172795 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-scripts" (OuterVolumeSpecName: "scripts") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.172989 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-kube-api-access-jcfh5" (OuterVolumeSpecName: "kube-api-access-jcfh5") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). InnerVolumeSpecName "kube-api-access-jcfh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.195227 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.257646 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.269396 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.269446 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcfh5\" (UniqueName: \"kubernetes.io/projected/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-kube-api-access-jcfh5\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.269464 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.269477 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.269489 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.270342 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-config-data" (OuterVolumeSpecName: "config-data") pod "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" (UID: "f2c68c24-7b74-4eb6-b4e3-d5dabca39ada"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.372844 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.675768 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" event={"ID":"abbcb731-b955-4e74-98e3-1ddb1db21986","Type":"ContainerStarted","Data":"00442de7db9c9528550efc46b221d466e288ca8624e7e91d5c9fb65df8b50b2b"} Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.682550 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5df9c758-f631-4338-8517-249e54d8366f","Type":"ContainerStarted","Data":"0123faaa4dd135450618fcc6e57b4de135d52ccec7471ade5dd3779a0983a377"} Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.691351 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f2c68c24-7b74-4eb6-b4e3-d5dabca39ada","Type":"ContainerDied","Data":"fbd599ef932d4d9007cbf45e8e388188180d7c7fa8d147a436b163b44d2d9764"} Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.691406 5002 scope.go:117] "RemoveContainer" containerID="17bf36c3ccbe3a52130640f1f4569deec2e2afd7c7240f1c043f3575f5efd52d" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.691435 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.727307 5002 scope.go:117] "RemoveContainer" containerID="be2e5d6e007baa78d12f8547879737ae03d57d45cc4d1b1ffed86002b1832078" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.734849 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.744849 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.770332 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:05 crc kubenswrapper[5002]: E1203 16:53:05.770795 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-central-agent" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.770821 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-central-agent" Dec 03 16:53:05 crc kubenswrapper[5002]: E1203 16:53:05.770841 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-notification-agent" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.770849 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-notification-agent" Dec 03 16:53:05 crc kubenswrapper[5002]: E1203 16:53:05.770864 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="sg-core" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.770871 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="sg-core" Dec 03 16:53:05 crc kubenswrapper[5002]: E1203 16:53:05.770894 5002 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="proxy-httpd" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.770903 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="proxy-httpd" Dec 03 16:53:05 crc kubenswrapper[5002]: E1203 16:53:05.770937 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e513765-f651-47b5-9889-a1255f675a88" containerName="init" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.770944 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e513765-f651-47b5-9889-a1255f675a88" containerName="init" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.771126 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="proxy-httpd" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.771150 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e513765-f651-47b5-9889-a1255f675a88" containerName="init" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.771162 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-central-agent" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.771173 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="ceilometer-notification-agent" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.771193 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" containerName="sg-core" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.773238 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.777325 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.777803 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.791788 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.813560 5002 scope.go:117] "RemoveContainer" containerID="1834356db2d075b52fe3d8d4adaa3f14c7f2436b9c412aa69dc72aeea55fb5f3" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.861804 5002 scope.go:117] "RemoveContainer" containerID="b519b295f6530017df4a1adca49e3669922b84f7ef33d1d8e8e7a73e8dc47a30" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882583 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7gjs\" (UniqueName: \"kubernetes.io/projected/f7a8caaf-1637-4514-be33-8499030e8f2c-kube-api-access-v7gjs\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882652 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-run-httpd\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882728 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-scripts\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882791 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-config-data\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.882886 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-log-httpd\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 
16:53:05.985220 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7gjs\" (UniqueName: \"kubernetes.io/projected/f7a8caaf-1637-4514-be33-8499030e8f2c-kube-api-access-v7gjs\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.985287 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-run-httpd\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.985345 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.985371 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-scripts\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.985400 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.985464 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-config-data\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.985504 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-log-httpd\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.986345 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-log-httpd\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.986979 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-run-httpd\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:05 crc kubenswrapper[5002]: I1203 16:53:05.994835 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:05.999608 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-scripts\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.000137 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-config-data\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.000559 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.005798 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7gjs\" (UniqueName: \"kubernetes.io/projected/f7a8caaf-1637-4514-be33-8499030e8f2c-kube-api-access-v7gjs\") pod \"ceilometer-0\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " pod="openstack/ceilometer-0" Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.112652 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.608696 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.708065 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerStarted","Data":"e9816aeae3ee4c144ccc7696bb72da657d591fcf6b282b6f1f26fb3c2cdb855e"} Dec 03 16:53:06 crc kubenswrapper[5002]: I1203 16:53:06.861014 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2c68c24-7b74-4eb6-b4e3-d5dabca39ada" path="/var/lib/kubelet/pods/f2c68c24-7b74-4eb6-b4e3-d5dabca39ada/volumes" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.213803 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6b89c68cbb-nkz44"] Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.215921 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.220169 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.220420 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.235843 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.246297 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b89c68cbb-nkz44"] Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.326901 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-public-tls-certs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.327019 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-internal-tls-certs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.327197 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data-custom\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.327271 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.327346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-combined-ca-bundle\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.327414 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wrgn\" (UniqueName: \"kubernetes.io/projected/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-kube-api-access-8wrgn\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.327462 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-logs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " 
pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429557 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-public-tls-certs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429606 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-internal-tls-certs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429687 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data-custom\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429711 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429734 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-combined-ca-bundle\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429767 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wrgn\" (UniqueName: \"kubernetes.io/projected/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-kube-api-access-8wrgn\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.429796 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-logs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.430282 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-logs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.435209 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-public-tls-certs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc 
kubenswrapper[5002]: I1203 16:53:07.438795 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.439506 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-combined-ca-bundle\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.440400 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data-custom\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.449994 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-internal-tls-certs\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.471652 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wrgn\" (UniqueName: \"kubernetes.io/projected/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-kube-api-access-8wrgn\") pod \"barbican-api-6b89c68cbb-nkz44\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.681174 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.767071 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1dd80080-ccac-4b1d-b8b1-c8d871dc10df","Type":"ContainerStarted","Data":"6974ea1536ae484536bff1b25fc8346de18c8bdab6990d1c9329574613a858ed"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.808728 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5df9c758-f631-4338-8517-249e54d8366f","Type":"ContainerStarted","Data":"16aa4532ab7988ace60fdf85050ace2835af9e6bdb1c376121ff1fcfbbf98348"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.822048 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" event={"ID":"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9","Type":"ContainerStarted","Data":"8a095cbc28aea7f906ad627bf6f7c5227b893239574390ebaaeddf3c3b84026c"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.822100 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" event={"ID":"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9","Type":"ContainerStarted","Data":"49cfe11824388fcd621bf5c8c3ab0301531171637a9eff8d8df877c7b70ebe69"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.825614 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" event={"ID":"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f","Type":"ContainerStarted","Data":"58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.825643 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" event={"ID":"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f","Type":"ContainerStarted","Data":"7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.848328 5002 generic.go:334] "Generic (PLEG): container finished" podID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerID="259f208880e1cc53be0e2e5cb718c302f0e48e2958135f7b69b3439b59bb0c0f" exitCode=0 Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.848680 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" event={"ID":"abbcb731-b955-4e74-98e3-1ddb1db21986","Type":"ContainerDied","Data":"259f208880e1cc53be0e2e5cb718c302f0e48e2958135f7b69b3439b59bb0c0f"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.870349 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" podStartSLOduration=5.395281191 podStartE2EDuration="8.870333601s" podCreationTimestamp="2025-12-03 16:52:59 +0000 UTC" firstStartedPulling="2025-12-03 16:53:01.906918222 +0000 UTC m=+1305.320740110" lastFinishedPulling="2025-12-03 16:53:05.381970612 +0000 UTC m=+1308.795792520" observedRunningTime="2025-12-03 16:53:07.867625687 +0000 UTC m=+1311.281447575" watchObservedRunningTime="2025-12-03 16:53:07.870333601 +0000 UTC m=+1311.284155489" Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.879609 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerStarted","Data":"db8e5aba3b2516d10809dceee9f4571fbb87fcb962c34ab8930090c4fbb179fc"} Dec 03 16:53:07 crc kubenswrapper[5002]: I1203 16:53:07.905809 5002 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" podStartSLOduration=5.431094785 podStartE2EDuration="8.905774555s" podCreationTimestamp="2025-12-03 16:52:59 +0000 UTC" firstStartedPulling="2025-12-03 16:53:01.906646434 +0000 UTC m=+1305.320468322" lastFinishedPulling="2025-12-03 16:53:05.381326164 +0000 UTC m=+1308.795148092" observedRunningTime="2025-12-03 16:53:07.904986714 +0000 UTC m=+1311.318808602" watchObservedRunningTime="2025-12-03 16:53:07.905774555 +0000 UTC m=+1311.319596443" Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.461740 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b89c68cbb-nkz44"] Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.894290 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b89c68cbb-nkz44" event={"ID":"55e94451-ebc7-4a6c-9927-df89ae0fc3c2","Type":"ContainerStarted","Data":"f37222b16941070545bc5cdbfabb844500ba2cc8fe4270bf53f73f00520a5183"} Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.903699 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" event={"ID":"abbcb731-b955-4e74-98e3-1ddb1db21986","Type":"ContainerStarted","Data":"c41e19901492ef72054fcae791fadc714779ad1c6de3ffc8143c85fb719d7d7c"} Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.903804 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.910083 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerStarted","Data":"d64b47ba74fdc1c6cef5b48b1d27ae90b27462a281cc82783a9f72382667484d"} Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.921996 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1dd80080-ccac-4b1d-b8b1-c8d871dc10df","Type":"ContainerStarted","Data":"a960c2038494740bd1e06a783f8207a95be589cbfeb6e819e4b2d741cdce9288"} Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.929480 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" podStartSLOduration=6.929465952 podStartE2EDuration="6.929465952s" podCreationTimestamp="2025-12-03 16:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:08.926186403 +0000 UTC m=+1312.340008281" watchObservedRunningTime="2025-12-03 16:53:08.929465952 +0000 UTC m=+1312.343287840" Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.936886 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5df9c758-f631-4338-8517-249e54d8366f","Type":"ContainerStarted","Data":"aec6edb8f1b76d628871860591b2f3c4ab08e7ef77418d0b6d428c2649333400"} Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.937153 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api-log" containerID="cri-o://16aa4532ab7988ace60fdf85050ace2835af9e6bdb1c376121ff1fcfbbf98348" gracePeriod=30 Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.937331 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 16:53:08 crc 
kubenswrapper[5002]: I1203 16:53:08.937368 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api" containerID="cri-o://aec6edb8f1b76d628871860591b2f3c4ab08e7ef77418d0b6d428c2649333400" gracePeriod=30 Dec 03 16:53:08 crc kubenswrapper[5002]: I1203 16:53:08.985622 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.95824045 podStartE2EDuration="6.985597709s" podCreationTimestamp="2025-12-03 16:53:02 +0000 UTC" firstStartedPulling="2025-12-03 16:53:03.439528922 +0000 UTC m=+1306.853350800" lastFinishedPulling="2025-12-03 16:53:05.466886171 +0000 UTC m=+1308.880708059" observedRunningTime="2025-12-03 16:53:08.964058363 +0000 UTC m=+1312.377880251" watchObservedRunningTime="2025-12-03 16:53:08.985597709 +0000 UTC m=+1312.399419597" Dec 03 16:53:09 crc kubenswrapper[5002]: I1203 16:53:09.008170 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.008147783 podStartE2EDuration="7.008147783s" podCreationTimestamp="2025-12-03 16:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:09.004208326 +0000 UTC m=+1312.418030214" watchObservedRunningTime="2025-12-03 16:53:09.008147783 +0000 UTC m=+1312.421969671" Dec 03 16:53:09 crc kubenswrapper[5002]: I1203 16:53:09.063222 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:53:09 crc kubenswrapper[5002]: I1203 16:53:09.985480 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b89c68cbb-nkz44" event={"ID":"55e94451-ebc7-4a6c-9927-df89ae0fc3c2","Type":"ContainerStarted","Data":"7782f9e82bd0b16c1ea4af876571de089d43551370a7363494d32a492da6fdb9"} Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.005184 5002 generic.go:334] "Generic (PLEG): container finished" podID="5df9c758-f631-4338-8517-249e54d8366f" containerID="aec6edb8f1b76d628871860591b2f3c4ab08e7ef77418d0b6d428c2649333400" exitCode=0 Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.005239 5002 generic.go:334] "Generic (PLEG): container finished" podID="5df9c758-f631-4338-8517-249e54d8366f" containerID="16aa4532ab7988ace60fdf85050ace2835af9e6bdb1c376121ff1fcfbbf98348" exitCode=143 Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.005244 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5df9c758-f631-4338-8517-249e54d8366f","Type":"ContainerDied","Data":"aec6edb8f1b76d628871860591b2f3c4ab08e7ef77418d0b6d428c2649333400"} Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.005311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5df9c758-f631-4338-8517-249e54d8366f","Type":"ContainerDied","Data":"16aa4532ab7988ace60fdf85050ace2835af9e6bdb1c376121ff1fcfbbf98348"} Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.063638 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152003 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-scripts\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152049 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5df9c758-f631-4338-8517-249e54d8366f-etc-machine-id\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152109 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152160 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g8jb\" (UniqueName: \"kubernetes.io/projected/5df9c758-f631-4338-8517-249e54d8366f-kube-api-access-8g8jb\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152287 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df9c758-f631-4338-8517-249e54d8366f-logs\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152338 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-combined-ca-bundle\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.152419 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data-custom\") pod \"5df9c758-f631-4338-8517-249e54d8366f\" (UID: \"5df9c758-f631-4338-8517-249e54d8366f\") " Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.156339 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5df9c758-f631-4338-8517-249e54d8366f-logs" (OuterVolumeSpecName: "logs") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.157884 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5df9c758-f631-4338-8517-249e54d8366f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.162808 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-scripts" (OuterVolumeSpecName: "scripts") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.166272 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5df9c758-f631-4338-8517-249e54d8366f-kube-api-access-8g8jb" (OuterVolumeSpecName: "kube-api-access-8g8jb") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "kube-api-access-8g8jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.175873 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.206781 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.236881 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data" (OuterVolumeSpecName: "config-data") pod "5df9c758-f631-4338-8517-249e54d8366f" (UID: "5df9c758-f631-4338-8517-249e54d8366f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255478 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5df9c758-f631-4338-8517-249e54d8366f-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255524 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255540 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255549 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255559 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5df9c758-f631-4338-8517-249e54d8366f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255568 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5df9c758-f631-4338-8517-249e54d8366f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:10 crc kubenswrapper[5002]: I1203 16:53:10.255583 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g8jb\" (UniqueName: \"kubernetes.io/projected/5df9c758-f631-4338-8517-249e54d8366f-kube-api-access-8g8jb\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.016639 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b89c68cbb-nkz44" event={"ID":"55e94451-ebc7-4a6c-9927-df89ae0fc3c2","Type":"ContainerStarted","Data":"2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81"} Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.018326 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.018356 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.021430 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerStarted","Data":"a53f2135c6ac7d2dafe3358a17c3920f584f9ac6879719de032b05223fb24d45"} Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.024351 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5df9c758-f631-4338-8517-249e54d8366f","Type":"ContainerDied","Data":"0123faaa4dd135450618fcc6e57b4de135d52ccec7471ade5dd3779a0983a377"} Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.024419 5002 scope.go:117] "RemoveContainer" containerID="aec6edb8f1b76d628871860591b2f3c4ab08e7ef77418d0b6d428c2649333400" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.024437 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.048866 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6b89c68cbb-nkz44" podStartSLOduration=4.048835304 podStartE2EDuration="4.048835304s" podCreationTimestamp="2025-12-03 16:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:11.043480388 +0000 UTC m=+1314.457302276" watchObservedRunningTime="2025-12-03 16:53:11.048835304 +0000 UTC m=+1314.462657222" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.054662 5002 scope.go:117] "RemoveContainer" containerID="16aa4532ab7988ace60fdf85050ace2835af9e6bdb1c376121ff1fcfbbf98348" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.156069 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.181876 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.192953 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:11 crc kubenswrapper[5002]: E1203 16:53:11.193428 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api-log" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.193449 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api-log" Dec 03 16:53:11 crc kubenswrapper[5002]: E1203 16:53:11.193470 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.193479 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.193655 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api-log" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.193675 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df9c758-f631-4338-8517-249e54d8366f" containerName="cinder-api" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.195083 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.199572 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.200184 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.200515 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.206655 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306205 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965b05ab-f8e9-485e-9f15-2160a598d8c2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306284 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965b05ab-f8e9-485e-9f15-2160a598d8c2-logs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306315 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data-custom\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306380 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-scripts\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306408 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306462 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306497 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlb5c\" (UniqueName: 
\"kubernetes.io/projected/965b05ab-f8e9-485e-9f15-2160a598d8c2-kube-api-access-jlb5c\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.306561 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.409867 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-scripts\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.410419 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.410868 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.410995 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.411035 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlb5c\" (UniqueName: \"kubernetes.io/projected/965b05ab-f8e9-485e-9f15-2160a598d8c2-kube-api-access-jlb5c\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.411141 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.411287 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965b05ab-f8e9-485e-9f15-2160a598d8c2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.411399 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965b05ab-f8e9-485e-9f15-2160a598d8c2-logs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.411430 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data-custom\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.411466 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965b05ab-f8e9-485e-9f15-2160a598d8c2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.412248 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965b05ab-f8e9-485e-9f15-2160a598d8c2-logs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.415993 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.419551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.420220 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data-custom\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.429165 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.430067 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.433336 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-scripts\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.437486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlb5c\" (UniqueName: \"kubernetes.io/projected/965b05ab-f8e9-485e-9f15-2160a598d8c2-kube-api-access-jlb5c\") pod \"cinder-api-0\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " pod="openstack/cinder-api-0" Dec 03 16:53:11 crc kubenswrapper[5002]: I1203 16:53:11.515739 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:53:12 crc kubenswrapper[5002]: I1203 16:53:12.095595 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:53:12 crc kubenswrapper[5002]: I1203 16:53:12.567565 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:12 crc kubenswrapper[5002]: I1203 16:53:12.645323 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:12 crc kubenswrapper[5002]: I1203 16:53:12.827251 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 16:53:12 crc kubenswrapper[5002]: I1203 16:53:12.865266 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5df9c758-f631-4338-8517-249e54d8366f" path="/var/lib/kubelet/pods/5df9c758-f631-4338-8517-249e54d8366f/volumes" Dec 03 16:53:12 crc kubenswrapper[5002]: I1203 16:53:12.889860 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.007159 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-74dcd656b8-rkf4g"] Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.007446 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-74dcd656b8-rkf4g" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-api" containerID="cri-o://0a0bf0b3d26021ee54bf8dfe431f204384455cf492c689ea5d631b9323f1e58d" gracePeriod=30 Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.008158 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-74dcd656b8-rkf4g" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-httpd" containerID="cri-o://e481f0b1d53253d1fe2c40539ac4388ae516d6d690ccae3394dead0a3c95281c" gracePeriod=30 Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.082272 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerStarted","Data":"60b90dcd5f81fb034df8898781ce4271b579a7d5a0e342bfde9510be1d1288c6"} Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.084393 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.105378 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965b05ab-f8e9-485e-9f15-2160a598d8c2","Type":"ContainerStarted","Data":"7289b73f03ab749a5e919fc53efa0fd71d6720a44c71c21cabbcd8b11406b07e"} Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.105436 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965b05ab-f8e9-485e-9f15-2160a598d8c2","Type":"ContainerStarted","Data":"4a1b82ddb6e16ce60bc6498d7b7a32be41d92d2e455b72a8607ee043219a55d3"} Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.118533 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.748969369 podStartE2EDuration="8.118503514s" podCreationTimestamp="2025-12-03 16:53:05 +0000 UTC" firstStartedPulling="2025-12-03 16:53:06.621714555 +0000 UTC m=+1310.035536443" lastFinishedPulling="2025-12-03 16:53:11.99124869 +0000 UTC m=+1315.405070588" 
observedRunningTime="2025-12-03 16:53:13.106390315 +0000 UTC m=+1316.520212213" watchObservedRunningTime="2025-12-03 16:53:13.118503514 +0000 UTC m=+1316.532325402" Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.118931 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.219539 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-849ff95dc5-rrgdn"] Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.219863 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerName="dnsmasq-dns" containerID="cri-o://3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679" gracePeriod=10 Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.352848 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.432911 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:13 crc kubenswrapper[5002]: I1203 16:53:13.868563 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.019808 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-config\") pod \"790a58d7-9e20-43fb-a3d4-92d99c595b88\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.019904 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-sb\") pod \"790a58d7-9e20-43fb-a3d4-92d99c595b88\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.019977 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-swift-storage-0\") pod \"790a58d7-9e20-43fb-a3d4-92d99c595b88\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.020086 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-nb\") pod \"790a58d7-9e20-43fb-a3d4-92d99c595b88\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.020145 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-svc\") pod \"790a58d7-9e20-43fb-a3d4-92d99c595b88\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.020298 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmgj7\" (UniqueName: \"kubernetes.io/projected/790a58d7-9e20-43fb-a3d4-92d99c595b88-kube-api-access-fmgj7\") pod \"790a58d7-9e20-43fb-a3d4-92d99c595b88\" (UID: \"790a58d7-9e20-43fb-a3d4-92d99c595b88\") " Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 
16:53:14.062074 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/790a58d7-9e20-43fb-a3d4-92d99c595b88-kube-api-access-fmgj7" (OuterVolumeSpecName: "kube-api-access-fmgj7") pod "790a58d7-9e20-43fb-a3d4-92d99c595b88" (UID: "790a58d7-9e20-43fb-a3d4-92d99c595b88"). InnerVolumeSpecName "kube-api-access-fmgj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.130237 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmgj7\" (UniqueName: \"kubernetes.io/projected/790a58d7-9e20-43fb-a3d4-92d99c595b88-kube-api-access-fmgj7\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.130541 5002 generic.go:334] "Generic (PLEG): container finished" podID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerID="e481f0b1d53253d1fe2c40539ac4388ae516d6d690ccae3394dead0a3c95281c" exitCode=0 Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.130595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74dcd656b8-rkf4g" event={"ID":"ac2907af-7d49-4ebb-bdbb-7a82ae373d89","Type":"ContainerDied","Data":"e481f0b1d53253d1fe2c40539ac4388ae516d6d690ccae3394dead0a3c95281c"} Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.150627 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "790a58d7-9e20-43fb-a3d4-92d99c595b88" (UID: "790a58d7-9e20-43fb-a3d4-92d99c595b88"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.154624 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "790a58d7-9e20-43fb-a3d4-92d99c595b88" (UID: "790a58d7-9e20-43fb-a3d4-92d99c595b88"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.157269 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "790a58d7-9e20-43fb-a3d4-92d99c595b88" (UID: "790a58d7-9e20-43fb-a3d4-92d99c595b88"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.157915 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965b05ab-f8e9-485e-9f15-2160a598d8c2","Type":"ContainerStarted","Data":"2c7e91463c94f3cc50714dbf46be64e0f5e71d87a2dbb56ce0212f820db59c76"} Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.158179 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170072 5002 generic.go:334] "Generic (PLEG): container finished" podID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerID="3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679" exitCode=0 Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170341 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="cinder-scheduler" containerID="cri-o://6974ea1536ae484536bff1b25fc8346de18c8bdab6990d1c9329574613a858ed" gracePeriod=30 Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170563 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="probe" containerID="cri-o://a960c2038494740bd1e06a783f8207a95be589cbfeb6e819e4b2d741cdce9288" gracePeriod=30 Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170650 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170705 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" event={"ID":"790a58d7-9e20-43fb-a3d4-92d99c595b88","Type":"ContainerDied","Data":"3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679"} Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170741 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-849ff95dc5-rrgdn" event={"ID":"790a58d7-9e20-43fb-a3d4-92d99c595b88","Type":"ContainerDied","Data":"ce5ed39f8a3e09676fab39bdd8f69459afc3af484bc1d35b973c21e0c07a9794"} Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.170778 5002 scope.go:117] "RemoveContainer" containerID="3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.195057 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.195026258 podStartE2EDuration="3.195026258s" podCreationTimestamp="2025-12-03 16:53:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:14.186150727 +0000 UTC m=+1317.599972615" watchObservedRunningTime="2025-12-03 16:53:14.195026258 +0000 UTC m=+1317.608848146" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.207439 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "790a58d7-9e20-43fb-a3d4-92d99c595b88" (UID: "790a58d7-9e20-43fb-a3d4-92d99c595b88"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.215687 5002 scope.go:117] "RemoveContainer" containerID="e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.235305 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-config" (OuterVolumeSpecName: "config") pod "790a58d7-9e20-43fb-a3d4-92d99c595b88" (UID: "790a58d7-9e20-43fb-a3d4-92d99c595b88"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.235330 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.237880 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.237918 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.237936 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.237953 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.237967 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/790a58d7-9e20-43fb-a3d4-92d99c595b88-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.238992 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.265193 5002 scope.go:117] "RemoveContainer" containerID="3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679" Dec 03 16:53:14 crc kubenswrapper[5002]: E1203 16:53:14.270143 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679\": container with ID starting with 3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679 not found: ID does not exist" containerID="3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.270192 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679"} err="failed to get container status \"3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679\": rpc error: code = NotFound desc = could not find container \"3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679\": container with ID starting with 3443b0898f1d62d763eccd4dfd371cf8bcf911785ef35fad3f204ad2cf7fc679 not found: ID 
does not exist" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.270221 5002 scope.go:117] "RemoveContainer" containerID="e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d" Dec 03 16:53:14 crc kubenswrapper[5002]: E1203 16:53:14.278951 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d\": container with ID starting with e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d not found: ID does not exist" containerID="e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.278997 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d"} err="failed to get container status \"e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d\": rpc error: code = NotFound desc = could not find container \"e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d\": container with ID starting with e90a94d89f57632d70021e7cc676820573fc45f261b65ba19712cf18ea23827d not found: ID does not exist" Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.531314 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-849ff95dc5-rrgdn"] Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.556906 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-849ff95dc5-rrgdn"] Dec 03 16:53:14 crc kubenswrapper[5002]: I1203 16:53:14.852132 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" path="/var/lib/kubelet/pods/790a58d7-9e20-43fb-a3d4-92d99c595b88/volumes" Dec 03 16:53:15 crc kubenswrapper[5002]: I1203 16:53:15.182435 5002 generic.go:334] "Generic (PLEG): container finished" podID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerID="a960c2038494740bd1e06a783f8207a95be589cbfeb6e819e4b2d741cdce9288" exitCode=0 Dec 03 16:53:15 crc kubenswrapper[5002]: I1203 16:53:15.182497 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1dd80080-ccac-4b1d-b8b1-c8d871dc10df","Type":"ContainerDied","Data":"a960c2038494740bd1e06a783f8207a95be589cbfeb6e819e4b2d741cdce9288"} Dec 03 16:53:15 crc kubenswrapper[5002]: I1203 16:53:15.744456 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.196992 5002 generic.go:334] "Generic (PLEG): container finished" podID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerID="6974ea1536ae484536bff1b25fc8346de18c8bdab6990d1c9329574613a858ed" exitCode=0 Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.197048 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1dd80080-ccac-4b1d-b8b1-c8d871dc10df","Type":"ContainerDied","Data":"6974ea1536ae484536bff1b25fc8346de18c8bdab6990d1c9329574613a858ed"} Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.733319 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.798443 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-scripts\") pod \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.798592 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-combined-ca-bundle\") pod \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.798836 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data\") pod \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.798887 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gfmz\" (UniqueName: \"kubernetes.io/projected/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-kube-api-access-7gfmz\") pod \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.798912 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-etc-machine-id\") pod \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.799110 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data-custom\") pod \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\" (UID: \"1dd80080-ccac-4b1d-b8b1-c8d871dc10df\") " Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.805346 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1dd80080-ccac-4b1d-b8b1-c8d871dc10df" (UID: "1dd80080-ccac-4b1d-b8b1-c8d871dc10df"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.805876 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-scripts" (OuterVolumeSpecName: "scripts") pod "1dd80080-ccac-4b1d-b8b1-c8d871dc10df" (UID: "1dd80080-ccac-4b1d-b8b1-c8d871dc10df"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.816858 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1dd80080-ccac-4b1d-b8b1-c8d871dc10df" (UID: "1dd80080-ccac-4b1d-b8b1-c8d871dc10df"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.842090 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.842899 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.842921 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.845769 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-kube-api-access-7gfmz" (OuterVolumeSpecName: "kube-api-access-7gfmz") pod "1dd80080-ccac-4b1d-b8b1-c8d871dc10df" (UID: "1dd80080-ccac-4b1d-b8b1-c8d871dc10df"). InnerVolumeSpecName "kube-api-access-7gfmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.949757 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gfmz\" (UniqueName: \"kubernetes.io/projected/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-kube-api-access-7gfmz\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:16 crc kubenswrapper[5002]: I1203 16:53:16.975102 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1dd80080-ccac-4b1d-b8b1-c8d871dc10df" (UID: "1dd80080-ccac-4b1d-b8b1-c8d871dc10df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.052079 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.097942 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data" (OuterVolumeSpecName: "config-data") pod "1dd80080-ccac-4b1d-b8b1-c8d871dc10df" (UID: "1dd80080-ccac-4b1d-b8b1-c8d871dc10df"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.154314 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd80080-ccac-4b1d-b8b1-c8d871dc10df-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.210089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"1dd80080-ccac-4b1d-b8b1-c8d871dc10df","Type":"ContainerDied","Data":"7ad55b7b3ec79a7909975988ef1993fbb5fb11f7b521c39615140c0d652edd07"} Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.210166 5002 scope.go:117] "RemoveContainer" containerID="a960c2038494740bd1e06a783f8207a95be589cbfeb6e819e4b2d741cdce9288" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.211228 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.238452 5002 scope.go:117] "RemoveContainer" containerID="6974ea1536ae484536bff1b25fc8346de18c8bdab6990d1c9329574613a858ed" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.283876 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.300534 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.307974 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:17 crc kubenswrapper[5002]: E1203 16:53:17.308571 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerName="init" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308591 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerName="init" Dec 03 16:53:17 crc kubenswrapper[5002]: E1203 16:53:17.308610 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="probe" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308618 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="probe" Dec 03 16:53:17 crc kubenswrapper[5002]: E1203 16:53:17.308645 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerName="dnsmasq-dns" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308652 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerName="dnsmasq-dns" Dec 03 16:53:17 crc kubenswrapper[5002]: E1203 16:53:17.308669 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="cinder-scheduler" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308676 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="cinder-scheduler" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308875 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="probe" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308887 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="790a58d7-9e20-43fb-a3d4-92d99c595b88" containerName="dnsmasq-dns" Dec 03 
16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.308909 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" containerName="cinder-scheduler" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.310139 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.315606 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.335041 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.361519 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.361571 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.361607 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.361643 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-scripts\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.361666 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36469a67-4d79-419f-9aaf-a1c128132287-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.361739 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgd8t\" (UniqueName: \"kubernetes.io/projected/36469a67-4d79-419f-9aaf-a1c128132287-kube-api-access-dgd8t\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.416045 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.464414 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-scripts\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" 
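Note on the sequence above: openstack/cinder-scheduler-0 is being replaced in place. The kubelet processes SyncLoop DELETE/REMOVE for the old pod (UID 1dd80080-ccac-4b1d-b8b1-c8d871dc10df), clears stale cpu_manager and memory_manager state for its containers, then handles SyncLoop ADD for a same-named pod with a new UID (36469a67-4d79-419f-9aaf-a1c128132287) and re-attaches and mounts its volumes. A minimal client-go sketch for observing this kind of churn from outside the node follows; the kubeconfig path and field selector are illustrative assumptions, not anything taken from this log.

package main

import (
    "context"
    "fmt"

    corev1 "k8s.io/api/core/v1"
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/tools/clientcmd"
)

func main() {
    // Assumption: a kubeconfig at the default location; in-cluster config works too.
    cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    if err != nil {
        panic(err)
    }
    cs, err := kubernetes.NewForConfig(cfg)
    if err != nil {
        panic(err)
    }
    // Watch only the pod named in the log; a DELETED event followed by ADDED
    // with a different UID is the replacement pattern recorded above.
    w, err := cs.CoreV1().Pods("openstack").Watch(context.TODO(), metav1.ListOptions{
        FieldSelector: "metadata.name=cinder-scheduler-0",
    })
    if err != nil {
        panic(err)
    }
    for ev := range w.ResultChan() {
        if pod, ok := ev.Object.(*corev1.Pod); ok {
            fmt.Println(ev.Type, pod.UID)
        }
    }
}

Seen through such a watch, the API-side DELETED/ADDED pair corresponds to the SyncLoop DELETE and SyncLoop ADD entries the kubelet logs here.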
Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.464490 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36469a67-4d79-419f-9aaf-a1c128132287-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.464623 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgd8t\" (UniqueName: \"kubernetes.io/projected/36469a67-4d79-419f-9aaf-a1c128132287-kube-api-access-dgd8t\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.464650 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36469a67-4d79-419f-9aaf-a1c128132287-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.466028 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.466076 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.466229 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.476357 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-scripts\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.476966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.480027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.489618 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-combined-ca-bundle\") pod 
\"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.542443 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgd8t\" (UniqueName: \"kubernetes.io/projected/36469a67-4d79-419f-9aaf-a1c128132287-kube-api-access-dgd8t\") pod \"cinder-scheduler-0\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " pod="openstack/cinder-scheduler-0" Dec 03 16:53:17 crc kubenswrapper[5002]: I1203 16:53:17.635483 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:53:18 crc kubenswrapper[5002]: I1203 16:53:18.197382 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:18 crc kubenswrapper[5002]: I1203 16:53:18.203110 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:18 crc kubenswrapper[5002]: I1203 16:53:18.399312 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:53:18 crc kubenswrapper[5002]: I1203 16:53:18.855190 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dd80080-ccac-4b1d-b8b1-c8d871dc10df" path="/var/lib/kubelet/pods/1dd80080-ccac-4b1d-b8b1-c8d871dc10df/volumes" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.193115 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.324529 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"36469a67-4d79-419f-9aaf-a1c128132287","Type":"ContainerStarted","Data":"4135f456101b8d8bc1d6aa3e05e38d4e5f0009555d8fd482836038f10a45877a"} Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.324968 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"36469a67-4d79-419f-9aaf-a1c128132287","Type":"ContainerStarted","Data":"2d492b8022017b9dbc249e071e8d5a9308210ceff225b48a8d905e35c20c28a8"} Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.346112 5002 generic.go:334] "Generic (PLEG): container finished" podID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerID="0a0bf0b3d26021ee54bf8dfe431f204384455cf492c689ea5d631b9323f1e58d" exitCode=0 Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.346199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74dcd656b8-rkf4g" event={"ID":"ac2907af-7d49-4ebb-bdbb-7a82ae373d89","Type":"ContainerDied","Data":"0a0bf0b3d26021ee54bf8dfe431f204384455cf492c689ea5d631b9323f1e58d"} Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.464384 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5bfcdd69d4-vcp2c"] Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.473955 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api-log" containerID="cri-o://45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56" gracePeriod=30 Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.473889 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" 
containerName="barbican-api" containerID="cri-o://0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339" gracePeriod=30 Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.614788 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.635604 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-config\") pod \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.635671 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-ovndb-tls-certs\") pod \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.635711 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-combined-ca-bundle\") pod \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.635928 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn2r6\" (UniqueName: \"kubernetes.io/projected/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-kube-api-access-rn2r6\") pod \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.674313 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-kube-api-access-rn2r6" (OuterVolumeSpecName: "kube-api-access-rn2r6") pod "ac2907af-7d49-4ebb-bdbb-7a82ae373d89" (UID: "ac2907af-7d49-4ebb-bdbb-7a82ae373d89"). InnerVolumeSpecName "kube-api-access-rn2r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.712871 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac2907af-7d49-4ebb-bdbb-7a82ae373d89" (UID: "ac2907af-7d49-4ebb-bdbb-7a82ae373d89"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.738467 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-httpd-config\") pod \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\" (UID: \"ac2907af-7d49-4ebb-bdbb-7a82ae373d89\") " Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.739324 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn2r6\" (UniqueName: \"kubernetes.io/projected/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-kube-api-access-rn2r6\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.739348 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.747836 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "ac2907af-7d49-4ebb-bdbb-7a82ae373d89" (UID: "ac2907af-7d49-4ebb-bdbb-7a82ae373d89"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.763618 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-config" (OuterVolumeSpecName: "config") pod "ac2907af-7d49-4ebb-bdbb-7a82ae373d89" (UID: "ac2907af-7d49-4ebb-bdbb-7a82ae373d89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.842128 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.842164 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.860926 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "ac2907af-7d49-4ebb-bdbb-7a82ae373d89" (UID: "ac2907af-7d49-4ebb-bdbb-7a82ae373d89"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:19 crc kubenswrapper[5002]: I1203 16:53:19.943456 5002 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac2907af-7d49-4ebb-bdbb-7a82ae373d89-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.280079 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 16:53:20 crc kubenswrapper[5002]: E1203 16:53:20.281143 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-httpd" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.281170 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-httpd" Dec 03 16:53:20 crc kubenswrapper[5002]: E1203 16:53:20.281206 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-api" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.281215 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-api" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.281465 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-api" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.281495 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" containerName="neutron-httpd" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.282376 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.287843 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.288610 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.289272 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-pb4fn" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.310957 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.352194 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqtz9\" (UniqueName: \"kubernetes.io/projected/1823be31-afb8-4085-a9a1-f1b75c65f3a2-kube-api-access-qqtz9\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.352496 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config-secret\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.352577 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.352668 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.359064 5002 generic.go:334] "Generic (PLEG): container finished" podID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerID="45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56" exitCode=143 Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.359164 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" event={"ID":"f7f3f9a7-aeaf-4732-a499-49a1f253e328","Type":"ContainerDied","Data":"45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56"} Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.361266 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-74dcd656b8-rkf4g" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.361251 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-74dcd656b8-rkf4g" event={"ID":"ac2907af-7d49-4ebb-bdbb-7a82ae373d89","Type":"ContainerDied","Data":"a8a99b25d1037355f443a3757774d4f814a214c2673d674411a685828cef092e"} Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.361525 5002 scope.go:117] "RemoveContainer" containerID="e481f0b1d53253d1fe2c40539ac4388ae516d6d690ccae3394dead0a3c95281c" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.363359 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"36469a67-4d79-419f-9aaf-a1c128132287","Type":"ContainerStarted","Data":"4bc8f24bf14262c55e9f63c9738562230246183c67d42270335aee3f4e625213"} Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.388780 5002 scope.go:117] "RemoveContainer" containerID="0a0bf0b3d26021ee54bf8dfe431f204384455cf492c689ea5d631b9323f1e58d" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.411233 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.411208154 podStartE2EDuration="3.411208154s" podCreationTimestamp="2025-12-03 16:53:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:20.406453535 +0000 UTC m=+1323.820275423" watchObservedRunningTime="2025-12-03 16:53:20.411208154 +0000 UTC m=+1323.825030042" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.435948 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-74dcd656b8-rkf4g"] Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.447995 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-74dcd656b8-rkf4g"] Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.455290 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config-secret\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " 
pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.455362 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.455431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.455609 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqtz9\" (UniqueName: \"kubernetes.io/projected/1823be31-afb8-4085-a9a1-f1b75c65f3a2-kube-api-access-qqtz9\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.459533 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.461405 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.474800 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqtz9\" (UniqueName: \"kubernetes.io/projected/1823be31-afb8-4085-a9a1-f1b75c65f3a2-kube-api-access-qqtz9\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.481125 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config-secret\") pod \"openstackclient\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.607522 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 16:53:20 crc kubenswrapper[5002]: I1203 16:53:20.904226 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac2907af-7d49-4ebb-bdbb-7a82ae373d89" path="/var/lib/kubelet/pods/ac2907af-7d49-4ebb-bdbb-7a82ae373d89/volumes" Dec 03 16:53:21 crc kubenswrapper[5002]: I1203 16:53:21.140994 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 16:53:21 crc kubenswrapper[5002]: W1203 16:53:21.153878 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1823be31_afb8_4085_a9a1_f1b75c65f3a2.slice/crio-da606d2f1a7ff6326d13ffe23eedfd09498cebb6a73e3955431be1c6faf1aaf5 WatchSource:0}: Error finding container da606d2f1a7ff6326d13ffe23eedfd09498cebb6a73e3955431be1c6faf1aaf5: Status 404 returned error can't find the container with id da606d2f1a7ff6326d13ffe23eedfd09498cebb6a73e3955431be1c6faf1aaf5 Dec 03 16:53:21 crc kubenswrapper[5002]: I1203 16:53:21.381555 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1823be31-afb8-4085-a9a1-f1b75c65f3a2","Type":"ContainerStarted","Data":"da606d2f1a7ff6326d13ffe23eedfd09498cebb6a73e3955431be1c6faf1aaf5"} Dec 03 16:53:22 crc kubenswrapper[5002]: I1203 16:53:22.640929 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 16:53:22 crc kubenswrapper[5002]: I1203 16:53:22.687707 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:44052->10.217.0.157:9311: read: connection reset by peer" Dec 03 16:53:22 crc kubenswrapper[5002]: I1203 16:53:22.687884 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:44062->10.217.0.157:9311: read: connection reset by peer" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.144178 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.213318 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7f3f9a7-aeaf-4732-a499-49a1f253e328-logs\") pod \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.213396 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data\") pod \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.213431 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-combined-ca-bundle\") pod \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.213456 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvwdp\" (UniqueName: \"kubernetes.io/projected/f7f3f9a7-aeaf-4732-a499-49a1f253e328-kube-api-access-gvwdp\") pod \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.213484 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data-custom\") pod \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\" (UID: \"f7f3f9a7-aeaf-4732-a499-49a1f253e328\") " Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.214463 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7f3f9a7-aeaf-4732-a499-49a1f253e328-logs" (OuterVolumeSpecName: "logs") pod "f7f3f9a7-aeaf-4732-a499-49a1f253e328" (UID: "f7f3f9a7-aeaf-4732-a499-49a1f253e328"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.220738 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f7f3f9a7-aeaf-4732-a499-49a1f253e328" (UID: "f7f3f9a7-aeaf-4732-a499-49a1f253e328"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.220790 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7f3f9a7-aeaf-4732-a499-49a1f253e328-kube-api-access-gvwdp" (OuterVolumeSpecName: "kube-api-access-gvwdp") pod "f7f3f9a7-aeaf-4732-a499-49a1f253e328" (UID: "f7f3f9a7-aeaf-4732-a499-49a1f253e328"). InnerVolumeSpecName "kube-api-access-gvwdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.260967 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7f3f9a7-aeaf-4732-a499-49a1f253e328" (UID: "f7f3f9a7-aeaf-4732-a499-49a1f253e328"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.276019 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data" (OuterVolumeSpecName: "config-data") pod "f7f3f9a7-aeaf-4732-a499-49a1f253e328" (UID: "f7f3f9a7-aeaf-4732-a499-49a1f253e328"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.317250 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.317294 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7f3f9a7-aeaf-4732-a499-49a1f253e328-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.317306 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.317322 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f3f9a7-aeaf-4732-a499-49a1f253e328-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.317335 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvwdp\" (UniqueName: \"kubernetes.io/projected/f7f3f9a7-aeaf-4732-a499-49a1f253e328-kube-api-access-gvwdp\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.404853 5002 generic.go:334] "Generic (PLEG): container finished" podID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerID="0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339" exitCode=0 Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.405089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" event={"ID":"f7f3f9a7-aeaf-4732-a499-49a1f253e328","Type":"ContainerDied","Data":"0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339"} Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.405277 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.405345 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bfcdd69d4-vcp2c" event={"ID":"f7f3f9a7-aeaf-4732-a499-49a1f253e328","Type":"ContainerDied","Data":"89eb420a4e83f19996a409b9134f0e9a0d981754b97c3c482380238691e79299"} Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.405373 5002 scope.go:117] "RemoveContainer" containerID="0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.430107 5002 scope.go:117] "RemoveContainer" containerID="45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.445718 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5bfcdd69d4-vcp2c"] Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.467573 5002 scope.go:117] "RemoveContainer" containerID="0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339" Dec 03 16:53:23 crc kubenswrapper[5002]: E1203 16:53:23.468177 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339\": container with ID starting with 0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339 not found: ID does not exist" containerID="0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.468303 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339"} err="failed to get container status \"0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339\": rpc error: code = NotFound desc = could not find container \"0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339\": container with ID starting with 0d41b43e02b8ad5d1d8f52852cbbf33d82ea75b53fc49390b7e2f741e94b9339 not found: ID does not exist" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.468411 5002 scope.go:117] "RemoveContainer" containerID="45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56" Dec 03 16:53:23 crc kubenswrapper[5002]: E1203 16:53:23.469299 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56\": container with ID starting with 45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56 not found: ID does not exist" containerID="45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.469342 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56"} err="failed to get container status \"45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56\": rpc error: code = NotFound desc = could not find container \"45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56\": container with ID starting with 45495fc92b9fa5bbf38198fbdf5d2afc816151105de4a2b77515c7ac7b1b7d56 not found: ID does not exist" Dec 03 16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.469664 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5bfcdd69d4-vcp2c"] Dec 03 
16:53:23 crc kubenswrapper[5002]: I1203 16:53:23.801023 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 16:53:24 crc kubenswrapper[5002]: I1203 16:53:24.856541 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" path="/var/lib/kubelet/pods/f7f3f9a7-aeaf-4732-a499-49a1f253e328/volumes" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.231067 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-df99c8b7f-mljz8"] Dec 03 16:53:26 crc kubenswrapper[5002]: E1203 16:53:26.232055 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.232075 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api" Dec 03 16:53:26 crc kubenswrapper[5002]: E1203 16:53:26.232125 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api-log" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.232134 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api-log" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.232550 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.232573 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7f3f9a7-aeaf-4732-a499-49a1f253e328" containerName="barbican-api-log" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.234381 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.285513 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.285842 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.285977 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291309 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27fbw\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-kube-api-access-27fbw\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291426 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-internal-tls-certs\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291483 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-config-data\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291689 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-etc-swift\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291760 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-log-httpd\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-combined-ca-bundle\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.291983 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-run-httpd\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.292130 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-public-tls-certs\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.313974 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-df99c8b7f-mljz8"] Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393097 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-internal-tls-certs\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393192 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-config-data\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393237 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-etc-swift\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393258 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-log-httpd\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393309 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-run-httpd\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393326 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-combined-ca-bundle\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393369 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-public-tls-certs\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.393412 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27fbw\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-kube-api-access-27fbw\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 
16:53:26.394652 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-run-httpd\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.395268 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-log-httpd\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.401842 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-config-data\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.401841 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-internal-tls-certs\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.402060 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-etc-swift\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.403436 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-combined-ca-bundle\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.403626 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-public-tls-certs\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.416646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27fbw\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-kube-api-access-27fbw\") pod \"swift-proxy-df99c8b7f-mljz8\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:26 crc kubenswrapper[5002]: I1203 16:53:26.609903 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.614205 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.614543 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-central-agent" containerID="cri-o://db8e5aba3b2516d10809dceee9f4571fbb87fcb962c34ab8930090c4fbb179fc" gracePeriod=30 Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.614609 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="sg-core" containerID="cri-o://a53f2135c6ac7d2dafe3358a17c3920f584f9ac6879719de032b05223fb24d45" gracePeriod=30 Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.614707 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-notification-agent" containerID="cri-o://d64b47ba74fdc1c6cef5b48b1d27ae90b27462a281cc82783a9f72382667484d" gracePeriod=30 Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.614773 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="proxy-httpd" containerID="cri-o://60b90dcd5f81fb034df8898781ce4271b579a7d5a0e342bfde9510be1d1288c6" gracePeriod=30 Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.628519 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.161:3000/\": EOF" Dec 03 16:53:27 crc kubenswrapper[5002]: I1203 16:53:27.964698 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 16:53:28 crc kubenswrapper[5002]: I1203 16:53:28.482156 5002 generic.go:334] "Generic (PLEG): container finished" podID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerID="60b90dcd5f81fb034df8898781ce4271b579a7d5a0e342bfde9510be1d1288c6" exitCode=0 Dec 03 16:53:28 crc kubenswrapper[5002]: I1203 16:53:28.482203 5002 generic.go:334] "Generic (PLEG): container finished" podID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerID="a53f2135c6ac7d2dafe3358a17c3920f584f9ac6879719de032b05223fb24d45" exitCode=2 Dec 03 16:53:28 crc kubenswrapper[5002]: I1203 16:53:28.482217 5002 generic.go:334] "Generic (PLEG): container finished" podID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerID="db8e5aba3b2516d10809dceee9f4571fbb87fcb962c34ab8930090c4fbb179fc" exitCode=0 Dec 03 16:53:28 crc kubenswrapper[5002]: I1203 16:53:28.482235 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerDied","Data":"60b90dcd5f81fb034df8898781ce4271b579a7d5a0e342bfde9510be1d1288c6"} Dec 03 16:53:28 crc kubenswrapper[5002]: I1203 16:53:28.482361 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerDied","Data":"a53f2135c6ac7d2dafe3358a17c3920f584f9ac6879719de032b05223fb24d45"} Dec 03 16:53:28 crc kubenswrapper[5002]: I1203 16:53:28.482384 5002 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerDied","Data":"db8e5aba3b2516d10809dceee9f4571fbb87fcb962c34ab8930090c4fbb179fc"} Dec 03 16:53:29 crc kubenswrapper[5002]: I1203 16:53:29.499326 5002 generic.go:334] "Generic (PLEG): container finished" podID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerID="d64b47ba74fdc1c6cef5b48b1d27ae90b27462a281cc82783a9f72382667484d" exitCode=0 Dec 03 16:53:29 crc kubenswrapper[5002]: I1203 16:53:29.499402 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerDied","Data":"d64b47ba74fdc1c6cef5b48b1d27ae90b27462a281cc82783a9f72382667484d"} Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.760327 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.824859 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-sg-core-conf-yaml\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.825486 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7gjs\" (UniqueName: \"kubernetes.io/projected/f7a8caaf-1637-4514-be33-8499030e8f2c-kube-api-access-v7gjs\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.825538 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-scripts\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.825653 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-run-httpd\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.825718 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-log-httpd\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.825756 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-combined-ca-bundle\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.825984 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-config-data\") pod \"f7a8caaf-1637-4514-be33-8499030e8f2c\" (UID: \"f7a8caaf-1637-4514-be33-8499030e8f2c\") " Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.826896 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.828303 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.832890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-scripts" (OuterVolumeSpecName: "scripts") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.833587 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a8caaf-1637-4514-be33-8499030e8f2c-kube-api-access-v7gjs" (OuterVolumeSpecName: "kube-api-access-v7gjs") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "kube-api-access-v7gjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.857737 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.918500 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.931341 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.931372 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7gjs\" (UniqueName: \"kubernetes.io/projected/f7a8caaf-1637-4514-be33-8499030e8f2c-kube-api-access-v7gjs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.931385 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.931395 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.931403 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7a8caaf-1637-4514-be33-8499030e8f2c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.931412 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:31 crc kubenswrapper[5002]: I1203 16:53:31.936356 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-config-data" (OuterVolumeSpecName: "config-data") pod "f7a8caaf-1637-4514-be33-8499030e8f2c" (UID: "f7a8caaf-1637-4514-be33-8499030e8f2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.033895 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a8caaf-1637-4514-be33-8499030e8f2c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.054455 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-df99c8b7f-mljz8"] Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.551069 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7a8caaf-1637-4514-be33-8499030e8f2c","Type":"ContainerDied","Data":"e9816aeae3ee4c144ccc7696bb72da657d591fcf6b282b6f1f26fb3c2cdb855e"} Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.551255 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.551517 5002 scope.go:117] "RemoveContainer" containerID="60b90dcd5f81fb034df8898781ce4271b579a7d5a0e342bfde9510be1d1288c6" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.555011 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1823be31-afb8-4085-a9a1-f1b75c65f3a2","Type":"ContainerStarted","Data":"d908e5d007ac194ec3ebbceec4f006b453c1be2fbf1833110a61991b4704e296"} Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.558013 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-df99c8b7f-mljz8" event={"ID":"30820296-8679-481c-9466-014d473e51ee","Type":"ContainerStarted","Data":"9d77ab2291660608c77e2f3623a3656df2f34d2c652d9964617af8f0a234719c"} Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.558065 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-df99c8b7f-mljz8" event={"ID":"30820296-8679-481c-9466-014d473e51ee","Type":"ContainerStarted","Data":"e81a6b5b2d06af54cddc4be90e4d4a13ac27bedc2032aaec6cf75ef0057328dd"} Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.558078 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-df99c8b7f-mljz8" event={"ID":"30820296-8679-481c-9466-014d473e51ee","Type":"ContainerStarted","Data":"c81a36efa37c21f6dc6f757cf32fc6a5e64fa7b0649c5acb9dc86bcc83764f0e"} Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.558217 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.575577 5002 scope.go:117] "RemoveContainer" containerID="a53f2135c6ac7d2dafe3358a17c3920f584f9ac6879719de032b05223fb24d45" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.591588 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-df99c8b7f-mljz8" podStartSLOduration=6.59154964 podStartE2EDuration="6.59154964s" podCreationTimestamp="2025-12-03 16:53:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:32.590966674 +0000 UTC m=+1336.004788562" watchObservedRunningTime="2025-12-03 16:53:32.59154964 +0000 UTC m=+1336.005371528" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.608507 5002 scope.go:117] "RemoveContainer" containerID="d64b47ba74fdc1c6cef5b48b1d27ae90b27462a281cc82783a9f72382667484d" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.618736 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.342305143 podStartE2EDuration="12.618716168s" podCreationTimestamp="2025-12-03 16:53:20 +0000 UTC" firstStartedPulling="2025-12-03 16:53:21.157862165 +0000 UTC m=+1324.571684053" lastFinishedPulling="2025-12-03 16:53:31.43427318 +0000 UTC m=+1334.848095078" observedRunningTime="2025-12-03 16:53:32.61290339 +0000 UTC m=+1336.026725278" watchObservedRunningTime="2025-12-03 16:53:32.618716168 +0000 UTC m=+1336.032538056" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.662496 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.662864 5002 scope.go:117] "RemoveContainer" containerID="db8e5aba3b2516d10809dceee9f4571fbb87fcb962c34ab8930090c4fbb179fc" Dec 03 16:53:32 crc 
kubenswrapper[5002]: I1203 16:53:32.675040 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.687713 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:32 crc kubenswrapper[5002]: E1203 16:53:32.688499 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-central-agent" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.688626 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-central-agent" Dec 03 16:53:32 crc kubenswrapper[5002]: E1203 16:53:32.688967 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="sg-core" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.689051 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="sg-core" Dec 03 16:53:32 crc kubenswrapper[5002]: E1203 16:53:32.689146 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-notification-agent" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.689229 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-notification-agent" Dec 03 16:53:32 crc kubenswrapper[5002]: E1203 16:53:32.689311 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="proxy-httpd" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.689379 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="proxy-httpd" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.689695 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-notification-agent" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.689838 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="sg-core" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.689966 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="ceilometer-central-agent" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.690050 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" containerName="proxy-httpd" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.697879 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.699946 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.707612 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.707884 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.757729 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-log-httpd\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.757795 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.757840 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-config-data\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.757876 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf5kc\" (UniqueName: \"kubernetes.io/projected/a67195a1-99d5-4e0f-998e-8abc0c4a1366-kube-api-access-jf5kc\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.757898 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.758147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-run-httpd\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.758366 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-scripts\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.853231 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a8caaf-1637-4514-be33-8499030e8f2c" path="/var/lib/kubelet/pods/f7a8caaf-1637-4514-be33-8499030e8f2c/volumes" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860196 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-log-httpd\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860419 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860566 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-config-data\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860671 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf5kc\" (UniqueName: \"kubernetes.io/projected/a67195a1-99d5-4e0f-998e-8abc0c4a1366-kube-api-access-jf5kc\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860866 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860968 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-run-httpd\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.861093 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-scripts\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.861369 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-run-httpd\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.860800 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-log-httpd\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.866437 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.867155 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-config-data\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.868328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-scripts\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.872165 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:32 crc kubenswrapper[5002]: I1203 16:53:32.883566 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf5kc\" (UniqueName: \"kubernetes.io/projected/a67195a1-99d5-4e0f-998e-8abc0c4a1366-kube-api-access-jf5kc\") pod \"ceilometer-0\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " pod="openstack/ceilometer-0" Dec 03 16:53:33 crc kubenswrapper[5002]: I1203 16:53:33.018550 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:33 crc kubenswrapper[5002]: I1203 16:53:33.503603 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:33 crc kubenswrapper[5002]: W1203 16:53:33.508006 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda67195a1_99d5_4e0f_998e_8abc0c4a1366.slice/crio-b74b93e32b020d6d056cb6d743951fcef38bf3f7b6842c87e0151c21e477fa0e WatchSource:0}: Error finding container b74b93e32b020d6d056cb6d743951fcef38bf3f7b6842c87e0151c21e477fa0e: Status 404 returned error can't find the container with id b74b93e32b020d6d056cb6d743951fcef38bf3f7b6842c87e0151c21e477fa0e Dec 03 16:53:33 crc kubenswrapper[5002]: I1203 16:53:33.575028 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerStarted","Data":"b74b93e32b020d6d056cb6d743951fcef38bf3f7b6842c87e0151c21e477fa0e"} Dec 03 16:53:33 crc kubenswrapper[5002]: I1203 16:53:33.575215 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:34 crc kubenswrapper[5002]: I1203 16:53:34.589421 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerStarted","Data":"35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5"} Dec 03 16:53:35 crc kubenswrapper[5002]: I1203 16:53:35.601672 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerStarted","Data":"2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48"} Dec 03 16:53:35 crc kubenswrapper[5002]: I1203 16:53:35.648836 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:36 crc kubenswrapper[5002]: I1203 16:53:36.365489 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:53:36 crc kubenswrapper[5002]: I1203 16:53:36.366716 5002 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-log" containerID="cri-o://ca1a99076b561338b712e716e07bc5bdcc6c0f06e1ceb0698bac3baedfccbc16" gracePeriod=30 Dec 03 16:53:36 crc kubenswrapper[5002]: I1203 16:53:36.367757 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-httpd" containerID="cri-o://bb4e3d3dd76099fe4e91ffb4bbe623d4107c096ca01fa011f4d0fcecf2ee1e5c" gracePeriod=30 Dec 03 16:53:36 crc kubenswrapper[5002]: I1203 16:53:36.621567 5002 generic.go:334] "Generic (PLEG): container finished" podID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerID="ca1a99076b561338b712e716e07bc5bdcc6c0f06e1ceb0698bac3baedfccbc16" exitCode=143 Dec 03 16:53:36 crc kubenswrapper[5002]: I1203 16:53:36.622001 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1662ba-738d-4085-9744-8ba6b84a1436","Type":"ContainerDied","Data":"ca1a99076b561338b712e716e07bc5bdcc6c0f06e1ceb0698bac3baedfccbc16"} Dec 03 16:53:37 crc kubenswrapper[5002]: I1203 16:53:37.303535 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:53:37 crc kubenswrapper[5002]: I1203 16:53:37.304212 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-log" containerID="cri-o://7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e" gracePeriod=30 Dec 03 16:53:37 crc kubenswrapper[5002]: I1203 16:53:37.304299 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-httpd" containerID="cri-o://e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13" gracePeriod=30 Dec 03 16:53:37 crc kubenswrapper[5002]: I1203 16:53:37.632196 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerStarted","Data":"e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55"} Dec 03 16:53:37 crc kubenswrapper[5002]: I1203 16:53:37.634344 5002 generic.go:334] "Generic (PLEG): container finished" podID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerID="7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e" exitCode=143 Dec 03 16:53:37 crc kubenswrapper[5002]: I1203 16:53:37.634382 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf","Type":"ContainerDied","Data":"7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e"} Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.133287 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-p8w5l"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.135389 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.147770 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-p8w5l"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.257610 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-ppdv5"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.260492 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.296215 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ppdv5"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.315263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c45df5c7-76f5-466f-9393-7815176634b6-operator-scripts\") pod \"nova-api-db-create-p8w5l\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.315365 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5m2x\" (UniqueName: \"kubernetes.io/projected/c45df5c7-76f5-466f-9393-7815176634b6-kube-api-access-j5m2x\") pod \"nova-api-db-create-p8w5l\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.364077 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8fda-account-create-update-2mg6t"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.365455 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.368709 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.417446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-operator-scripts\") pod \"nova-cell0-db-create-ppdv5\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.417570 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c45df5c7-76f5-466f-9393-7815176634b6-operator-scripts\") pod \"nova-api-db-create-p8w5l\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.417607 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5m2x\" (UniqueName: \"kubernetes.io/projected/c45df5c7-76f5-466f-9393-7815176634b6-kube-api-access-j5m2x\") pod \"nova-api-db-create-p8w5l\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.417645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkmth\" (UniqueName: \"kubernetes.io/projected/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-kube-api-access-vkmth\") pod \"nova-cell0-db-create-ppdv5\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.418636 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c45df5c7-76f5-466f-9393-7815176634b6-operator-scripts\") pod \"nova-api-db-create-p8w5l\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.450405 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5m2x\" (UniqueName: \"kubernetes.io/projected/c45df5c7-76f5-466f-9393-7815176634b6-kube-api-access-j5m2x\") pod \"nova-api-db-create-p8w5l\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.454862 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-jqnpw"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.456586 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.475802 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8fda-account-create-update-2mg6t"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.490828 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-jqnpw"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.523431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-operator-scripts\") pod \"nova-cell0-db-create-ppdv5\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.523525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkmth\" (UniqueName: \"kubernetes.io/projected/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-kube-api-access-vkmth\") pod \"nova-cell0-db-create-ppdv5\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.523557 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vnkl\" (UniqueName: \"kubernetes.io/projected/87c14106-47c6-4086-b10c-ca427875f3f0-kube-api-access-9vnkl\") pod \"nova-api-8fda-account-create-update-2mg6t\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.523587 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c14106-47c6-4086-b10c-ca427875f3f0-operator-scripts\") pod \"nova-api-8fda-account-create-update-2mg6t\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.523615 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf4rx\" (UniqueName: \"kubernetes.io/projected/5f384c1f-6c43-4273-9dd6-301c4aad47bd-kube-api-access-wf4rx\") pod \"nova-cell1-db-create-jqnpw\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.523647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f384c1f-6c43-4273-9dd6-301c4aad47bd-operator-scripts\") pod \"nova-cell1-db-create-jqnpw\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.524785 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-operator-scripts\") pod \"nova-cell0-db-create-ppdv5\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.558793 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-be01-account-create-update-lfstp"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.560712 5002 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.567693 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.580607 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-be01-account-create-update-lfstp"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.583403 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkmth\" (UniqueName: \"kubernetes.io/projected/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-kube-api-access-vkmth\") pod \"nova-cell0-db-create-ppdv5\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.628111 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vnkl\" (UniqueName: \"kubernetes.io/projected/87c14106-47c6-4086-b10c-ca427875f3f0-kube-api-access-9vnkl\") pod \"nova-api-8fda-account-create-update-2mg6t\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.628158 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c14106-47c6-4086-b10c-ca427875f3f0-operator-scripts\") pod \"nova-api-8fda-account-create-update-2mg6t\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.628190 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf4rx\" (UniqueName: \"kubernetes.io/projected/5f384c1f-6c43-4273-9dd6-301c4aad47bd-kube-api-access-wf4rx\") pod \"nova-cell1-db-create-jqnpw\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.628216 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f384c1f-6c43-4273-9dd6-301c4aad47bd-operator-scripts\") pod \"nova-cell1-db-create-jqnpw\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.628958 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f384c1f-6c43-4273-9dd6-301c4aad47bd-operator-scripts\") pod \"nova-cell1-db-create-jqnpw\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.629443 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c14106-47c6-4086-b10c-ca427875f3f0-operator-scripts\") pod \"nova-api-8fda-account-create-update-2mg6t\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.630532 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.646678 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.669418 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf4rx\" (UniqueName: \"kubernetes.io/projected/5f384c1f-6c43-4273-9dd6-301c4aad47bd-kube-api-access-wf4rx\") pod \"nova-cell1-db-create-jqnpw\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.688057 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vnkl\" (UniqueName: \"kubernetes.io/projected/87c14106-47c6-4086-b10c-ca427875f3f0-kube-api-access-9vnkl\") pod \"nova-api-8fda-account-create-update-2mg6t\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.732386 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz49z\" (UniqueName: \"kubernetes.io/projected/85214ca4-e776-4b0a-893d-516243894640-kube-api-access-gz49z\") pod \"nova-cell0-be01-account-create-update-lfstp\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.732819 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85214ca4-e776-4b0a-893d-516243894640-operator-scripts\") pod \"nova-cell0-be01-account-create-update-lfstp\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.756074 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f979-account-create-update-zqkjf"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.757373 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.763948 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.774315 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f979-account-create-update-zqkjf"] Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.834851 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz49z\" (UniqueName: \"kubernetes.io/projected/85214ca4-e776-4b0a-893d-516243894640-kube-api-access-gz49z\") pod \"nova-cell0-be01-account-create-update-lfstp\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.834934 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85214ca4-e776-4b0a-893d-516243894640-operator-scripts\") pod \"nova-cell0-be01-account-create-update-lfstp\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.836832 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85214ca4-e776-4b0a-893d-516243894640-operator-scripts\") pod \"nova-cell0-be01-account-create-update-lfstp\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.837273 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.865437 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz49z\" (UniqueName: \"kubernetes.io/projected/85214ca4-e776-4b0a-893d-516243894640-kube-api-access-gz49z\") pod \"nova-cell0-be01-account-create-update-lfstp\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.939574 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n75j\" (UniqueName: \"kubernetes.io/projected/416c4441-853a-4b67-95a9-79fc893fa7be-kube-api-access-9n75j\") pod \"nova-cell1-f979-account-create-update-zqkjf\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.939640 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/416c4441-853a-4b67-95a9-79fc893fa7be-operator-scripts\") pod \"nova-cell1-f979-account-create-update-zqkjf\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.949227 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:38 crc kubenswrapper[5002]: I1203 16:53:38.988314 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.042715 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n75j\" (UniqueName: \"kubernetes.io/projected/416c4441-853a-4b67-95a9-79fc893fa7be-kube-api-access-9n75j\") pod \"nova-cell1-f979-account-create-update-zqkjf\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.042812 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/416c4441-853a-4b67-95a9-79fc893fa7be-operator-scripts\") pod \"nova-cell1-f979-account-create-update-zqkjf\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.043731 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/416c4441-853a-4b67-95a9-79fc893fa7be-operator-scripts\") pod \"nova-cell1-f979-account-create-update-zqkjf\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.073567 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n75j\" (UniqueName: \"kubernetes.io/projected/416c4441-853a-4b67-95a9-79fc893fa7be-kube-api-access-9n75j\") pod \"nova-cell1-f979-account-create-update-zqkjf\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.226902 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.340654 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-p8w5l"] Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.464966 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ppdv5"] Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.656026 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-jqnpw"] Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.691701 5002 generic.go:334] "Generic (PLEG): container finished" podID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerID="bb4e3d3dd76099fe4e91ffb4bbe623d4107c096ca01fa011f4d0fcecf2ee1e5c" exitCode=0 Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.691946 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1662ba-738d-4085-9744-8ba6b84a1436","Type":"ContainerDied","Data":"bb4e3d3dd76099fe4e91ffb4bbe623d4107c096ca01fa011f4d0fcecf2ee1e5c"} Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.695667 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ppdv5" event={"ID":"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca","Type":"ContainerStarted","Data":"1df01c0d022f245cfd97b1bec388ae7b85e89bba70b7c823036c79ebca78311d"} Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.697178 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-p8w5l" event={"ID":"c45df5c7-76f5-466f-9393-7815176634b6","Type":"ContainerStarted","Data":"30188c44024de0926d6fc67c6c0a8dff552acaeaba9bc28c79db70a9e91df69b"} Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.725880 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerStarted","Data":"c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9"} Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.726405 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-central-agent" containerID="cri-o://35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5" gracePeriod=30 Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.726714 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.726869 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-notification-agent" containerID="cri-o://2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48" gracePeriod=30 Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.726890 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="sg-core" containerID="cri-o://e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55" gracePeriod=30 Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.727215 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="proxy-httpd" 
containerID="cri-o://c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9" gracePeriod=30 Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.760558 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.164733597 podStartE2EDuration="7.760531774s" podCreationTimestamp="2025-12-03 16:53:32 +0000 UTC" firstStartedPulling="2025-12-03 16:53:33.510924259 +0000 UTC m=+1336.924746147" lastFinishedPulling="2025-12-03 16:53:38.106722436 +0000 UTC m=+1341.520544324" observedRunningTime="2025-12-03 16:53:39.746308477 +0000 UTC m=+1343.160130365" watchObservedRunningTime="2025-12-03 16:53:39.760531774 +0000 UTC m=+1343.174353662" Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.929939 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-be01-account-create-update-lfstp"] Dec 03 16:53:39 crc kubenswrapper[5002]: I1203 16:53:39.943520 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8fda-account-create-update-2mg6t"] Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.072557 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f979-account-create-update-zqkjf"] Dec 03 16:53:40 crc kubenswrapper[5002]: E1203 16:53:40.122335 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda67195a1_99d5_4e0f_998e_8abc0c4a1366.slice/crio-c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda67195a1_99d5_4e0f_998e_8abc0c4a1366.slice/crio-conmon-e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:53:40 crc kubenswrapper[5002]: W1203 16:53:40.386808 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87c14106_47c6_4086_b10c_ca427875f3f0.slice/crio-d4936fbdd7f6833bc812c954fba0074c7db558da86e0e3432dae7788f5ba925d WatchSource:0}: Error finding container d4936fbdd7f6833bc812c954fba0074c7db558da86e0e3432dae7788f5ba925d: Status 404 returned error can't find the container with id d4936fbdd7f6833bc812c954fba0074c7db558da86e0e3432dae7788f5ba925d Dec 03 16:53:40 crc kubenswrapper[5002]: W1203 16:53:40.391813 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod416c4441_853a_4b67_95a9_79fc893fa7be.slice/crio-59d36c3dc490f92b7c96d0d2a06da3b93fa9af554f2f1ea5a53049f5e33d7a37 WatchSource:0}: Error finding container 59d36c3dc490f92b7c96d0d2a06da3b93fa9af554f2f1ea5a53049f5e33d7a37: Status 404 returned error can't find the container with id 59d36c3dc490f92b7c96d0d2a06da3b93fa9af554f2f1ea5a53049f5e33d7a37 Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.736809 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8fda-account-create-update-2mg6t" event={"ID":"87c14106-47c6-4086-b10c-ca427875f3f0","Type":"ContainerStarted","Data":"d4936fbdd7f6833bc812c954fba0074c7db558da86e0e3432dae7788f5ba925d"} Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.742480 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" 
event={"ID":"416c4441-853a-4b67-95a9-79fc893fa7be","Type":"ContainerStarted","Data":"59d36c3dc490f92b7c96d0d2a06da3b93fa9af554f2f1ea5a53049f5e33d7a37"} Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.770277 5002 generic.go:334] "Generic (PLEG): container finished" podID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerID="c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9" exitCode=0 Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.770317 5002 generic.go:334] "Generic (PLEG): container finished" podID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerID="e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55" exitCode=2 Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.770325 5002 generic.go:334] "Generic (PLEG): container finished" podID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerID="2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48" exitCode=0 Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.770438 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerDied","Data":"c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9"} Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.770476 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerDied","Data":"e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55"} Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.770504 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerDied","Data":"2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48"} Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.774484 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-be01-account-create-update-lfstp" event={"ID":"85214ca4-e776-4b0a-893d-516243894640","Type":"ContainerStarted","Data":"57a00f334543f967b76155fb8b16a246fcc215eee1e52018b4b75ea9cc670cbe"} Dec 03 16:53:40 crc kubenswrapper[5002]: I1203 16:53:40.780959 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jqnpw" event={"ID":"5f384c1f-6c43-4273-9dd6-301c4aad47bd","Type":"ContainerStarted","Data":"4d7e5ec045ff0f3b0e56a75eeeef14b8710780fb8894411c4a6d236b6c4937eb"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.505961 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.511718 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.600858 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-logs\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.600917 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-combined-ca-bundle\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.600940 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-logs\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.601001 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.601660 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-logs" (OuterVolumeSpecName: "logs") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.601834 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-logs" (OuterVolumeSpecName: "logs") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602571 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-combined-ca-bundle\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602604 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-httpd-run\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602626 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-config-data\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602685 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-httpd-run\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602705 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-internal-tls-certs\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602740 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-public-tls-certs\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602797 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-config-data\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602831 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602855 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mld2\" (UniqueName: \"kubernetes.io/projected/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-kube-api-access-6mld2\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602882 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-scripts\") pod \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\" (UID: \"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf\") " Dec 03 16:53:41 crc 
kubenswrapper[5002]: I1203 16:53:41.602935 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-scripts\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602962 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.602992 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psht7\" (UniqueName: \"kubernetes.io/projected/ae1662ba-738d-4085-9744-8ba6b84a1436-kube-api-access-psht7\") pod \"ae1662ba-738d-4085-9744-8ba6b84a1436\" (UID: \"ae1662ba-738d-4085-9744-8ba6b84a1436\") " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.603432 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.603450 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.603462 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1662ba-738d-4085-9744-8ba6b84a1436-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.604015 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.637807 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-scripts" (OuterVolumeSpecName: "scripts") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.639276 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae1662ba-738d-4085-9744-8ba6b84a1436-kube-api-access-psht7" (OuterVolumeSpecName: "kube-api-access-psht7") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "kube-api-access-psht7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.643809 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.655049 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-kube-api-access-6mld2" (OuterVolumeSpecName: "kube-api-access-6mld2") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "kube-api-access-6mld2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.658192 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-scripts" (OuterVolumeSpecName: "scripts") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.658332 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.674387 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.715991 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716032 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716191 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716212 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716225 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mld2\" (UniqueName: \"kubernetes.io/projected/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-kube-api-access-6mld2\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716237 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716262 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.716273 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psht7\" (UniqueName: \"kubernetes.io/projected/ae1662ba-738d-4085-9744-8ba6b84a1436-kube-api-access-psht7\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.752338 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.759087 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.784081 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.788408 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.806965 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.818124 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.818160 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.818176 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.830864 5002 generic.go:334] "Generic (PLEG): container finished" podID="85214ca4-e776-4b0a-893d-516243894640" containerID="83bc12a265e0074e224d6cf33b500361e2aac9770f8cdf19b1148ec59fa48f6e" exitCode=0 Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.830975 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-be01-account-create-update-lfstp" event={"ID":"85214ca4-e776-4b0a-893d-516243894640","Type":"ContainerDied","Data":"83bc12a265e0074e224d6cf33b500361e2aac9770f8cdf19b1148ec59fa48f6e"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.832930 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1662ba-738d-4085-9744-8ba6b84a1436","Type":"ContainerDied","Data":"aa00d32052da85c4f5ac0fa140c2b7fce2c1fe765e3e28be3b6ba6b10efa72d6"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.832963 5002 scope.go:117] "RemoveContainer" containerID="bb4e3d3dd76099fe4e91ffb4bbe623d4107c096ca01fa011f4d0fcecf2ee1e5c" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.833086 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.859415 5002 generic.go:334] "Generic (PLEG): container finished" podID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerID="e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13" exitCode=0 Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.859489 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf","Type":"ContainerDied","Data":"e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.859517 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9a7e593b-4bfc-486c-b301-bfb7f39fbfbf","Type":"ContainerDied","Data":"f04253a74213b3a3cbb57bb436b1adc48a5f60345eb4a6fa21fc6adaf32dfe46"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.859580 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.892283 5002 scope.go:117] "RemoveContainer" containerID="ca1a99076b561338b712e716e07bc5bdcc6c0f06e1ceb0698bac3baedfccbc16" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.896365 5002 generic.go:334] "Generic (PLEG): container finished" podID="5f384c1f-6c43-4273-9dd6-301c4aad47bd" containerID="f8619d4dd7755cc1678293835a839ff1fae1a4ef2ee2b2138b7794279ebcef54" exitCode=0 Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.896435 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jqnpw" event={"ID":"5f384c1f-6c43-4273-9dd6-301c4aad47bd","Type":"ContainerDied","Data":"f8619d4dd7755cc1678293835a839ff1fae1a4ef2ee2b2138b7794279ebcef54"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.905974 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-config-data" (OuterVolumeSpecName: "config-data") pod "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" (UID: "9a7e593b-4bfc-486c-b301-bfb7f39fbfbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.906902 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-config-data" (OuterVolumeSpecName: "config-data") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.908137 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.932248 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.932290 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.932301 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.962596 5002 generic.go:334] "Generic (PLEG): container finished" podID="87c14106-47c6-4086-b10c-ca427875f3f0" containerID="36cff69dfad7c31954e2260b65d28d76d76ad21b2a3da4e43e99aaca2171482c" exitCode=0 Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.962680 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8fda-account-create-update-2mg6t" event={"ID":"87c14106-47c6-4086-b10c-ca427875f3f0","Type":"ContainerDied","Data":"36cff69dfad7c31954e2260b65d28d76d76ad21b2a3da4e43e99aaca2171482c"} Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 16:53:41.983625 5002 generic.go:334] "Generic (PLEG): container finished" podID="5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" containerID="e155877081445eee5138996cc287338fe1fb77ea902023396c1de90698d90b16" exitCode=0 Dec 03 16:53:41 crc kubenswrapper[5002]: I1203 
16:53:41.983704 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ppdv5" event={"ID":"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca","Type":"ContainerDied","Data":"e155877081445eee5138996cc287338fe1fb77ea902023396c1de90698d90b16"} Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.022507 5002 scope.go:117] "RemoveContainer" containerID="e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.039362 5002 generic.go:334] "Generic (PLEG): container finished" podID="416c4441-853a-4b67-95a9-79fc893fa7be" containerID="7b40bf8ba185db9fe3f624f36e84fbc575c0cd6f08a6e739a0f0496bd507ae1f" exitCode=0 Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.039446 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" event={"ID":"416c4441-853a-4b67-95a9-79fc893fa7be","Type":"ContainerDied","Data":"7b40bf8ba185db9fe3f624f36e84fbc575c0cd6f08a6e739a0f0496bd507ae1f"} Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.053067 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ae1662ba-738d-4085-9744-8ba6b84a1436" (UID: "ae1662ba-738d-4085-9744-8ba6b84a1436"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.060771 5002 generic.go:334] "Generic (PLEG): container finished" podID="c45df5c7-76f5-466f-9393-7815176634b6" containerID="115c8c772f00cb96dcd1beb56316398ca922e717a486f1720cd17f7b222565ff" exitCode=0 Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.061908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-p8w5l" event={"ID":"c45df5c7-76f5-466f-9393-7815176634b6","Type":"ContainerDied","Data":"115c8c772f00cb96dcd1beb56316398ca922e717a486f1720cd17f7b222565ff"} Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.109041 5002 scope.go:117] "RemoveContainer" containerID="7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.140419 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1662ba-738d-4085-9744-8ba6b84a1436-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.145242 5002 scope.go:117] "RemoveContainer" containerID="e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13" Dec 03 16:53:42 crc kubenswrapper[5002]: E1203 16:53:42.152602 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13\": container with ID starting with e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13 not found: ID does not exist" containerID="e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.152875 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13"} err="failed to get container status \"e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13\": rpc error: code = NotFound desc = could not find container 
\"e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13\": container with ID starting with e6648c35bf0cb680fa89281b40660d01f4d7a766e7f05e9923fb5f8772979a13 not found: ID does not exist" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.152932 5002 scope.go:117] "RemoveContainer" containerID="7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e" Dec 03 16:53:42 crc kubenswrapper[5002]: E1203 16:53:42.155038 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e\": container with ID starting with 7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e not found: ID does not exist" containerID="7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.155067 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e"} err="failed to get container status \"7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e\": rpc error: code = NotFound desc = could not find container \"7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e\": container with ID starting with 7687d838e9b4d1b6b248898fb3dc9829d5b9e6a751cec34077acf0b1d754b33e not found: ID does not exist" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.275960 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.288146 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.297666 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: E1203 16:53:42.298285 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-log" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298306 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-log" Dec 03 16:53:42 crc kubenswrapper[5002]: E1203 16:53:42.298336 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-httpd" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298343 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-httpd" Dec 03 16:53:42 crc kubenswrapper[5002]: E1203 16:53:42.298366 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-httpd" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298373 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-httpd" Dec 03 16:53:42 crc kubenswrapper[5002]: E1203 16:53:42.298389 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-log" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298395 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-log" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298595 5002 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-httpd" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298616 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-httpd" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298629 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" containerName="glance-log" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.298639 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" containerName="glance-log" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.299897 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.303556 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.304127 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.304317 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-njd5b" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.304370 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.304990 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.312089 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.320659 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.328869 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.333928 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.337494 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.337700 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.341548 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448051 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448138 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448324 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448477 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448521 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448639 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448783 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448850 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448938 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpsvc\" (UniqueName: \"kubernetes.io/projected/d105ad9d-fbca-4a0c-b188-a88a363756c2-kube-api-access-wpsvc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.448988 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.449052 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-logs\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.449315 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.449419 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.449500 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.449569 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jslt\" (UniqueName: \"kubernetes.io/projected/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-kube-api-access-8jslt\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.449664 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc 
kubenswrapper[5002]: I1203 16:53:42.551738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.551815 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.551874 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.551899 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpsvc\" (UniqueName: \"kubernetes.io/projected/d105ad9d-fbca-4a0c-b188-a88a363756c2-kube-api-access-wpsvc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552798 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552826 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-logs\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552647 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552899 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552967 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.552988 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553026 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jslt\" (UniqueName: \"kubernetes.io/projected/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-kube-api-access-8jslt\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553051 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553091 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553134 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553172 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553210 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553242 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553391 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553694 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.553941 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-logs\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.554216 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.558110 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.559051 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.560785 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.561289 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.561319 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.566280 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.573633 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.573934 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jslt\" (UniqueName: \"kubernetes.io/projected/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-kube-api-access-8jslt\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.577211 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.588550 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpsvc\" (UniqueName: \"kubernetes.io/projected/d105ad9d-fbca-4a0c-b188-a88a363756c2-kube-api-access-wpsvc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.599091 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.608289 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.623966 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.651297 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.869113 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a7e593b-4bfc-486c-b301-bfb7f39fbfbf" path="/var/lib/kubelet/pods/9a7e593b-4bfc-486c-b301-bfb7f39fbfbf/volumes" Dec 03 16:53:42 crc kubenswrapper[5002]: I1203 16:53:42.870406 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae1662ba-738d-4085-9744-8ba6b84a1436" path="/var/lib/kubelet/pods/ae1662ba-738d-4085-9744-8ba6b84a1436/volumes" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.300269 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.390531 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.623729 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.686524 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85214ca4-e776-4b0a-893d-516243894640-operator-scripts\") pod \"85214ca4-e776-4b0a-893d-516243894640\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.686682 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gz49z\" (UniqueName: \"kubernetes.io/projected/85214ca4-e776-4b0a-893d-516243894640-kube-api-access-gz49z\") pod \"85214ca4-e776-4b0a-893d-516243894640\" (UID: \"85214ca4-e776-4b0a-893d-516243894640\") " Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.687560 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85214ca4-e776-4b0a-893d-516243894640-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85214ca4-e776-4b0a-893d-516243894640" (UID: "85214ca4-e776-4b0a-893d-516243894640"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.700583 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85214ca4-e776-4b0a-893d-516243894640-kube-api-access-gz49z" (OuterVolumeSpecName: "kube-api-access-gz49z") pod "85214ca4-e776-4b0a-893d-516243894640" (UID: "85214ca4-e776-4b0a-893d-516243894640"). InnerVolumeSpecName "kube-api-access-gz49z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.788673 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gz49z\" (UniqueName: \"kubernetes.io/projected/85214ca4-e776-4b0a-893d-516243894640-kube-api-access-gz49z\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.788704 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85214ca4-e776-4b0a-893d-516243894640-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.925252 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.926023 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.962187 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.963458 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.986575 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.993102 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5m2x\" (UniqueName: \"kubernetes.io/projected/c45df5c7-76f5-466f-9393-7815176634b6-kube-api-access-j5m2x\") pod \"c45df5c7-76f5-466f-9393-7815176634b6\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.993224 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c45df5c7-76f5-466f-9393-7815176634b6-operator-scripts\") pod \"c45df5c7-76f5-466f-9393-7815176634b6\" (UID: \"c45df5c7-76f5-466f-9393-7815176634b6\") " Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.993333 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-operator-scripts\") pod \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.993439 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkmth\" (UniqueName: \"kubernetes.io/projected/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-kube-api-access-vkmth\") pod \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\" (UID: \"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca\") " Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.995466 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c45df5c7-76f5-466f-9393-7815176634b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c45df5c7-76f5-466f-9393-7815176634b6" (UID: "c45df5c7-76f5-466f-9393-7815176634b6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:43 crc kubenswrapper[5002]: I1203 16:53:43.995548 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" (UID: "5e39aa75-8f7f-4fd2-864b-b0ba10f955ca"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.002472 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-kube-api-access-vkmth" (OuterVolumeSpecName: "kube-api-access-vkmth") pod "5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" (UID: "5e39aa75-8f7f-4fd2-864b-b0ba10f955ca"). InnerVolumeSpecName "kube-api-access-vkmth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.010126 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c45df5c7-76f5-466f-9393-7815176634b6-kube-api-access-j5m2x" (OuterVolumeSpecName: "kube-api-access-j5m2x") pod "c45df5c7-76f5-466f-9393-7815176634b6" (UID: "c45df5c7-76f5-466f-9393-7815176634b6"). InnerVolumeSpecName "kube-api-access-j5m2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.092383 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-be01-account-create-update-lfstp" event={"ID":"85214ca4-e776-4b0a-893d-516243894640","Type":"ContainerDied","Data":"57a00f334543f967b76155fb8b16a246fcc215eee1e52018b4b75ea9cc670cbe"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.092447 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57a00f334543f967b76155fb8b16a246fcc215eee1e52018b4b75ea9cc670cbe" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.092545 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-be01-account-create-update-lfstp" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.095717 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/416c4441-853a-4b67-95a9-79fc893fa7be-operator-scripts\") pod \"416c4441-853a-4b67-95a9-79fc893fa7be\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.095832 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f384c1f-6c43-4273-9dd6-301c4aad47bd-operator-scripts\") pod \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.095883 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c14106-47c6-4086-b10c-ca427875f3f0-operator-scripts\") pod \"87c14106-47c6-4086-b10c-ca427875f3f0\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.096045 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vnkl\" (UniqueName: \"kubernetes.io/projected/87c14106-47c6-4086-b10c-ca427875f3f0-kube-api-access-9vnkl\") pod \"87c14106-47c6-4086-b10c-ca427875f3f0\" (UID: \"87c14106-47c6-4086-b10c-ca427875f3f0\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.097344 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f384c1f-6c43-4273-9dd6-301c4aad47bd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f384c1f-6c43-4273-9dd6-301c4aad47bd" (UID: "5f384c1f-6c43-4273-9dd6-301c4aad47bd"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.097419 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/416c4441-853a-4b67-95a9-79fc893fa7be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "416c4441-853a-4b67-95a9-79fc893fa7be" (UID: "416c4441-853a-4b67-95a9-79fc893fa7be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.097716 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87c14106-47c6-4086-b10c-ca427875f3f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87c14106-47c6-4086-b10c-ca427875f3f0" (UID: "87c14106-47c6-4086-b10c-ca427875f3f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.108887 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n75j\" (UniqueName: \"kubernetes.io/projected/416c4441-853a-4b67-95a9-79fc893fa7be-kube-api-access-9n75j\") pod \"416c4441-853a-4b67-95a9-79fc893fa7be\" (UID: \"416c4441-853a-4b67-95a9-79fc893fa7be\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.109121 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf4rx\" (UniqueName: \"kubernetes.io/projected/5f384c1f-6c43-4273-9dd6-301c4aad47bd-kube-api-access-wf4rx\") pod \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\" (UID: \"5f384c1f-6c43-4273-9dd6-301c4aad47bd\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110536 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110574 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/416c4441-853a-4b67-95a9-79fc893fa7be-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110589 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkmth\" (UniqueName: \"kubernetes.io/projected/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca-kube-api-access-vkmth\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110601 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f384c1f-6c43-4273-9dd6-301c4aad47bd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110613 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c14106-47c6-4086-b10c-ca427875f3f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110624 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5m2x\" (UniqueName: \"kubernetes.io/projected/c45df5c7-76f5-466f-9393-7815176634b6-kube-api-access-j5m2x\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110637 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/c45df5c7-76f5-466f-9393-7815176634b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110641 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87c14106-47c6-4086-b10c-ca427875f3f0-kube-api-access-9vnkl" (OuterVolumeSpecName: "kube-api-access-9vnkl") pod "87c14106-47c6-4086-b10c-ca427875f3f0" (UID: "87c14106-47c6-4086-b10c-ca427875f3f0"). InnerVolumeSpecName "kube-api-access-9vnkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.110719 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d105ad9d-fbca-4a0c-b188-a88a363756c2","Type":"ContainerStarted","Data":"4dd01f4f0edb9fffdc14edcd8409635fe351d4bce2c3fbc1b656dc5ffe6a54ad"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.113505 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/416c4441-853a-4b67-95a9-79fc893fa7be-kube-api-access-9n75j" (OuterVolumeSpecName: "kube-api-access-9n75j") pod "416c4441-853a-4b67-95a9-79fc893fa7be" (UID: "416c4441-853a-4b67-95a9-79fc893fa7be"). InnerVolumeSpecName "kube-api-access-9n75j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.115894 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f384c1f-6c43-4273-9dd6-301c4aad47bd-kube-api-access-wf4rx" (OuterVolumeSpecName: "kube-api-access-wf4rx") pod "5f384c1f-6c43-4273-9dd6-301c4aad47bd" (UID: "5f384c1f-6c43-4273-9dd6-301c4aad47bd"). InnerVolumeSpecName "kube-api-access-wf4rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.120709 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jqnpw" event={"ID":"5f384c1f-6c43-4273-9dd6-301c4aad47bd","Type":"ContainerDied","Data":"4d7e5ec045ff0f3b0e56a75eeeef14b8710780fb8894411c4a6d236b6c4937eb"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.120774 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d7e5ec045ff0f3b0e56a75eeeef14b8710780fb8894411c4a6d236b6c4937eb" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.120872 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jqnpw" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.127447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8fda-account-create-update-2mg6t" event={"ID":"87c14106-47c6-4086-b10c-ca427875f3f0","Type":"ContainerDied","Data":"d4936fbdd7f6833bc812c954fba0074c7db558da86e0e3432dae7788f5ba925d"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.127502 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4936fbdd7f6833bc812c954fba0074c7db558da86e0e3432dae7788f5ba925d" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.127597 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8fda-account-create-update-2mg6t" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.144075 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ppdv5" event={"ID":"5e39aa75-8f7f-4fd2-864b-b0ba10f955ca","Type":"ContainerDied","Data":"1df01c0d022f245cfd97b1bec388ae7b85e89bba70b7c823036c79ebca78311d"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.144125 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1df01c0d022f245cfd97b1bec388ae7b85e89bba70b7c823036c79ebca78311d" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.144199 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ppdv5" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.155249 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" event={"ID":"416c4441-853a-4b67-95a9-79fc893fa7be","Type":"ContainerDied","Data":"59d36c3dc490f92b7c96d0d2a06da3b93fa9af554f2f1ea5a53049f5e33d7a37"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.155304 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59d36c3dc490f92b7c96d0d2a06da3b93fa9af554f2f1ea5a53049f5e33d7a37" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.155374 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f979-account-create-update-zqkjf" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.186472 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929","Type":"ContainerStarted","Data":"095e1c781a31fb7b65d3f6dfb5bfed87a58fb3e7589277c53b4f7f4bc4ffce5f"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.209138 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-p8w5l" event={"ID":"c45df5c7-76f5-466f-9393-7815176634b6","Type":"ContainerDied","Data":"30188c44024de0926d6fc67c6c0a8dff552acaeaba9bc28c79db70a9e91df69b"} Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.209830 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30188c44024de0926d6fc67c6c0a8dff552acaeaba9bc28c79db70a9e91df69b" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.209928 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-p8w5l" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.214142 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vnkl\" (UniqueName: \"kubernetes.io/projected/87c14106-47c6-4086-b10c-ca427875f3f0-kube-api-access-9vnkl\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.214180 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n75j\" (UniqueName: \"kubernetes.io/projected/416c4441-853a-4b67-95a9-79fc893fa7be-kube-api-access-9n75j\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.214198 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf4rx\" (UniqueName: \"kubernetes.io/projected/5f384c1f-6c43-4273-9dd6-301c4aad47bd-kube-api-access-wf4rx\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.646340 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.726625 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-config-data\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.726681 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-scripts\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.726716 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-combined-ca-bundle\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.726817 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf5kc\" (UniqueName: \"kubernetes.io/projected/a67195a1-99d5-4e0f-998e-8abc0c4a1366-kube-api-access-jf5kc\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.727688 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-log-httpd\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.727761 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-run-httpd\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.727842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-sg-core-conf-yaml\") pod \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\" (UID: \"a67195a1-99d5-4e0f-998e-8abc0c4a1366\") " Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.728203 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.729711 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.731880 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-scripts" (OuterVolumeSpecName: "scripts") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.739940 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a67195a1-99d5-4e0f-998e-8abc0c4a1366-kube-api-access-jf5kc" (OuterVolumeSpecName: "kube-api-access-jf5kc") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "kube-api-access-jf5kc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.779940 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.831194 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf5kc\" (UniqueName: \"kubernetes.io/projected/a67195a1-99d5-4e0f-998e-8abc0c4a1366-kube-api-access-jf5kc\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.831234 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.831248 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a67195a1-99d5-4e0f-998e-8abc0c4a1366-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.831256 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.831269 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.868675 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.932726 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:44 crc kubenswrapper[5002]: I1203 16:53:44.992355 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-config-data" (OuterVolumeSpecName: "config-data") pod "a67195a1-99d5-4e0f-998e-8abc0c4a1366" (UID: "a67195a1-99d5-4e0f-998e-8abc0c4a1366"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.035099 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a67195a1-99d5-4e0f-998e-8abc0c4a1366-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.228669 5002 generic.go:334] "Generic (PLEG): container finished" podID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerID="35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5" exitCode=0 Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.228823 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.229072 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerDied","Data":"35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5"} Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.229134 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a67195a1-99d5-4e0f-998e-8abc0c4a1366","Type":"ContainerDied","Data":"b74b93e32b020d6d056cb6d743951fcef38bf3f7b6842c87e0151c21e477fa0e"} Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.229156 5002 scope.go:117] "RemoveContainer" containerID="c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.248195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d105ad9d-fbca-4a0c-b188-a88a363756c2","Type":"ContainerStarted","Data":"c338030fdc0b9dd2210114f065d62d29e7ab549361d1a0383112a39e58c2dc38"} Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.256830 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929","Type":"ContainerStarted","Data":"37c6c1754413201dda18005715d872051feb0d1aef179748ad24d74f8d9e9696"} Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.275825 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.285872 5002 scope.go:117] "RemoveContainer" containerID="e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.286797 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309277 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309691 5002 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="proxy-httpd" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309713 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="proxy-httpd" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309740 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="416c4441-853a-4b67-95a9-79fc893fa7be" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309761 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="416c4441-853a-4b67-95a9-79fc893fa7be" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309773 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87c14106-47c6-4086-b10c-ca427875f3f0" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309779 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="87c14106-47c6-4086-b10c-ca427875f3f0" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309791 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c45df5c7-76f5-466f-9393-7815176634b6" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309799 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c45df5c7-76f5-466f-9393-7815176634b6" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309809 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-notification-agent" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309817 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-notification-agent" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309828 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309835 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309843 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85214ca4-e776-4b0a-893d-516243894640" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309849 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="85214ca4-e776-4b0a-893d-516243894640" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309860 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f384c1f-6c43-4273-9dd6-301c4aad47bd" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309866 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f384c1f-6c43-4273-9dd6-301c4aad47bd" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309880 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-central-agent" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309886 5002 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-central-agent" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.309902 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="sg-core" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.309907 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="sg-core" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310068 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="87c14106-47c6-4086-b10c-ca427875f3f0" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310082 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-central-agent" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310090 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="85214ca4-e776-4b0a-893d-516243894640" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310101 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="sg-core" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310111 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310120 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="ceilometer-notification-agent" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310134 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="416c4441-853a-4b67-95a9-79fc893fa7be" containerName="mariadb-account-create-update" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310144 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f384c1f-6c43-4273-9dd6-301c4aad47bd" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310156 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" containerName="proxy-httpd" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.310167 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c45df5c7-76f5-466f-9393-7815176634b6" containerName="mariadb-database-create" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.311764 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.316241 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.317804 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.318294 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.345426 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rltcq\" (UniqueName: \"kubernetes.io/projected/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-kube-api-access-rltcq\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.345942 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.345978 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.346058 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-config-data\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.346101 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-run-httpd\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.346129 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-log-httpd\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.346165 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-scripts\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.357200 5002 scope.go:117] "RemoveContainer" containerID="2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.389640 5002 scope.go:117] "RemoveContainer" containerID="35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 
16:53:45.424766 5002 scope.go:117] "RemoveContainer" containerID="c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.425407 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9\": container with ID starting with c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9 not found: ID does not exist" containerID="c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.425443 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9"} err="failed to get container status \"c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9\": rpc error: code = NotFound desc = could not find container \"c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9\": container with ID starting with c189181fb569e198a356489b716d57aedafea5e74aa114fd503ba6394f0d49b9 not found: ID does not exist" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.425470 5002 scope.go:117] "RemoveContainer" containerID="e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.425856 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55\": container with ID starting with e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55 not found: ID does not exist" containerID="e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.425883 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55"} err="failed to get container status \"e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55\": rpc error: code = NotFound desc = could not find container \"e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55\": container with ID starting with e3424309f333b863c52fd0b497ee1fb65a196e6b7787b2e03568464a9e52fd55 not found: ID does not exist" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.425898 5002 scope.go:117] "RemoveContainer" containerID="2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.426302 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48\": container with ID starting with 2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48 not found: ID does not exist" containerID="2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.426355 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48"} err="failed to get container status \"2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48\": rpc error: code = NotFound desc = could not find container \"2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48\": container with ID 
starting with 2d5bf6d3c968d665415c6f4d310bb566479522a8de119bbeb4d87c5833b23d48 not found: ID does not exist" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.426370 5002 scope.go:117] "RemoveContainer" containerID="35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5" Dec 03 16:53:45 crc kubenswrapper[5002]: E1203 16:53:45.426702 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5\": container with ID starting with 35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5 not found: ID does not exist" containerID="35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.426732 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5"} err="failed to get container status \"35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5\": rpc error: code = NotFound desc = could not find container \"35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5\": container with ID starting with 35fc7cf86c1fffb0d692406eeab8f63283227b223e9c1bfc13b1b2951209efb5 not found: ID does not exist" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447462 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rltcq\" (UniqueName: \"kubernetes.io/projected/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-kube-api-access-rltcq\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447524 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447547 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447597 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-config-data\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447631 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-run-httpd\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447650 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-log-httpd\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.447673 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-scripts\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.450032 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-run-httpd\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.450125 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-log-httpd\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.456465 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.456618 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-scripts\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.456830 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.465117 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-config-data\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.480915 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rltcq\" (UniqueName: \"kubernetes.io/projected/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-kube-api-access-rltcq\") pod \"ceilometer-0\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.636640 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:53:45 crc kubenswrapper[5002]: I1203 16:53:45.950309 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:46 crc kubenswrapper[5002]: I1203 16:53:46.269992 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerStarted","Data":"11bdf52c238152c439fe1dd48b73c963cedf44f19a87b99c228cc0d952cd587b"} Dec 03 16:53:46 crc kubenswrapper[5002]: I1203 16:53:46.272926 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929","Type":"ContainerStarted","Data":"d8f143f4c81641c282592b60722c3b70b5b045ac7605f798b8c288b1a27c879f"} Dec 03 16:53:46 crc kubenswrapper[5002]: I1203 16:53:46.277541 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d105ad9d-fbca-4a0c-b188-a88a363756c2","Type":"ContainerStarted","Data":"344937d693aca613f5d6c7658a05a5e864b67f0cdbdd5fe03d3655235754d316"} Dec 03 16:53:46 crc kubenswrapper[5002]: I1203 16:53:46.301344 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.30132334 podStartE2EDuration="4.30132334s" podCreationTimestamp="2025-12-03 16:53:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:46.296648082 +0000 UTC m=+1349.710469970" watchObservedRunningTime="2025-12-03 16:53:46.30132334 +0000 UTC m=+1349.715145218" Dec 03 16:53:46 crc kubenswrapper[5002]: I1203 16:53:46.332629 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.332601611 podStartE2EDuration="4.332601611s" podCreationTimestamp="2025-12-03 16:53:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:53:46.322552158 +0000 UTC m=+1349.736374046" watchObservedRunningTime="2025-12-03 16:53:46.332601611 +0000 UTC m=+1349.746423499" Dec 03 16:53:46 crc kubenswrapper[5002]: I1203 16:53:46.873002 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a67195a1-99d5-4e0f-998e-8abc0c4a1366" path="/var/lib/kubelet/pods/a67195a1-99d5-4e0f-998e-8abc0c4a1366/volumes" Dec 03 16:53:47 crc kubenswrapper[5002]: I1203 16:53:47.289936 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerStarted","Data":"211faa4e1377dc2e6411cea3c505af826123b2ce6b5c336cf12ce43b83bf3272"} Dec 03 16:53:48 crc kubenswrapper[5002]: I1203 16:53:48.300343 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerStarted","Data":"591c738cf7ad4579586313a549e2540266e66d36b59b2db39b029f01c9a8b3a4"} Dec 03 16:53:48 crc kubenswrapper[5002]: I1203 16:53:48.300759 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerStarted","Data":"f0c2289a41ba9e9402d0dd102555d505c9cd151880c95a9e2b1b52ee262cdb90"} Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.030130 5002 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-conductor-db-sync-jrk64"] Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.031721 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.060888 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.061022 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lpd6h" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.062153 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-config-data\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.062442 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.062640 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsp9t\" (UniqueName: \"kubernetes.io/projected/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-kube-api-access-hsp9t\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.062712 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-scripts\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.062879 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.078162 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jrk64"] Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.163939 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.164017 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsp9t\" (UniqueName: \"kubernetes.io/projected/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-kube-api-access-hsp9t\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.164048 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-scripts\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.164121 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-config-data\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.174641 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-scripts\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.174700 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.187854 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsp9t\" (UniqueName: \"kubernetes.io/projected/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-kube-api-access-hsp9t\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.188061 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-config-data\") pod \"nova-cell0-conductor-db-sync-jrk64\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.388124 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:53:49 crc kubenswrapper[5002]: I1203 16:53:49.917864 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jrk64"] Dec 03 16:53:50 crc kubenswrapper[5002]: I1203 16:53:50.320022 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jrk64" event={"ID":"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88","Type":"ContainerStarted","Data":"1e31bd88e8cdb0178b61416a0ea292d35e94bb71c319d3563a625eff63c6b0fa"} Dec 03 16:53:50 crc kubenswrapper[5002]: I1203 16:53:50.322590 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerStarted","Data":"eccb737edd2942494563d971a4a0cd7ab2b2026690689899739e491e7c550650"} Dec 03 16:53:50 crc kubenswrapper[5002]: I1203 16:53:50.322791 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:53:50 crc kubenswrapper[5002]: I1203 16:53:50.345807 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.082446789 podStartE2EDuration="5.345785779s" podCreationTimestamp="2025-12-03 16:53:45 +0000 UTC" firstStartedPulling="2025-12-03 16:53:45.966503593 +0000 UTC m=+1349.380325481" lastFinishedPulling="2025-12-03 16:53:49.229842583 +0000 UTC m=+1352.643664471" observedRunningTime="2025-12-03 16:53:50.343344464 +0000 UTC m=+1353.757166352" watchObservedRunningTime="2025-12-03 16:53:50.345785779 +0000 UTC m=+1353.759607677" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.624832 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.625805 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.654112 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.654221 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.690010 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.714270 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.733485 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:53:52 crc kubenswrapper[5002]: I1203 16:53:52.734242 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.356640 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.356706 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.356718 5002 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.356729 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.474494 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.474863 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-central-agent" containerID="cri-o://211faa4e1377dc2e6411cea3c505af826123b2ce6b5c336cf12ce43b83bf3272" gracePeriod=30 Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.474992 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-notification-agent" containerID="cri-o://f0c2289a41ba9e9402d0dd102555d505c9cd151880c95a9e2b1b52ee262cdb90" gracePeriod=30 Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.474989 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="proxy-httpd" containerID="cri-o://eccb737edd2942494563d971a4a0cd7ab2b2026690689899739e491e7c550650" gracePeriod=30 Dec 03 16:53:53 crc kubenswrapper[5002]: I1203 16:53:53.475037 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="sg-core" containerID="cri-o://591c738cf7ad4579586313a549e2540266e66d36b59b2db39b029f01c9a8b3a4" gracePeriod=30 Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373276 5002 generic.go:334] "Generic (PLEG): container finished" podID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerID="eccb737edd2942494563d971a4a0cd7ab2b2026690689899739e491e7c550650" exitCode=0 Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373647 5002 generic.go:334] "Generic (PLEG): container finished" podID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerID="591c738cf7ad4579586313a549e2540266e66d36b59b2db39b029f01c9a8b3a4" exitCode=2 Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373366 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerDied","Data":"eccb737edd2942494563d971a4a0cd7ab2b2026690689899739e491e7c550650"} Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerDied","Data":"591c738cf7ad4579586313a549e2540266e66d36b59b2db39b029f01c9a8b3a4"} Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373718 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerDied","Data":"f0c2289a41ba9e9402d0dd102555d505c9cd151880c95a9e2b1b52ee262cdb90"} Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373657 5002 generic.go:334] "Generic (PLEG): container finished" podID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerID="f0c2289a41ba9e9402d0dd102555d505c9cd151880c95a9e2b1b52ee262cdb90" exitCode=0 Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373758 5002 
generic.go:334] "Generic (PLEG): container finished" podID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerID="211faa4e1377dc2e6411cea3c505af826123b2ce6b5c336cf12ce43b83bf3272" exitCode=0 Dec 03 16:53:54 crc kubenswrapper[5002]: I1203 16:53:54.373833 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerDied","Data":"211faa4e1377dc2e6411cea3c505af826123b2ce6b5c336cf12ce43b83bf3272"} Dec 03 16:53:55 crc kubenswrapper[5002]: I1203 16:53:55.375320 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:53:55 crc kubenswrapper[5002]: I1203 16:53:55.387563 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:53:55 crc kubenswrapper[5002]: I1203 16:53:55.831224 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:55 crc kubenswrapper[5002]: I1203 16:53:55.831375 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:53:55 crc kubenswrapper[5002]: I1203 16:53:55.844669 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:53:56 crc kubenswrapper[5002]: I1203 16:53:56.015115 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.585625 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.659232 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-run-httpd\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.659769 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.660174 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rltcq\" (UniqueName: \"kubernetes.io/projected/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-kube-api-access-rltcq\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.660270 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-combined-ca-bundle\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.660370 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-log-httpd\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.660459 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-config-data\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.660507 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-scripts\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.660549 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-sg-core-conf-yaml\") pod \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\" (UID: \"c53a201f-6aca-4a6c-a51e-212e7a9f1c64\") " Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.661178 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.661603 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.661634 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.666552 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-scripts" (OuterVolumeSpecName: "scripts") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.668261 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-kube-api-access-rltcq" (OuterVolumeSpecName: "kube-api-access-rltcq") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "kube-api-access-rltcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.691111 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.733733 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.762978 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rltcq\" (UniqueName: \"kubernetes.io/projected/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-kube-api-access-rltcq\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.763031 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.763041 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.763050 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.763797 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-config-data" (OuterVolumeSpecName: "config-data") pod "c53a201f-6aca-4a6c-a51e-212e7a9f1c64" (UID: "c53a201f-6aca-4a6c-a51e-212e7a9f1c64"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:00 crc kubenswrapper[5002]: I1203 16:54:00.864738 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53a201f-6aca-4a6c-a51e-212e7a9f1c64-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.460078 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jrk64" event={"ID":"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88","Type":"ContainerStarted","Data":"be0b486777dc09b33af931a7bba8f0f3a49cbd01154dc708e1b7333dca4661eb"} Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.463492 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c53a201f-6aca-4a6c-a51e-212e7a9f1c64","Type":"ContainerDied","Data":"11bdf52c238152c439fe1dd48b73c963cedf44f19a87b99c228cc0d952cd587b"} Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.463672 5002 scope.go:117] "RemoveContainer" containerID="eccb737edd2942494563d971a4a0cd7ab2b2026690689899739e491e7c550650" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.463731 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.486964 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-jrk64" podStartSLOduration=2.127297608 podStartE2EDuration="12.486940208s" podCreationTimestamp="2025-12-03 16:53:49 +0000 UTC" firstStartedPulling="2025-12-03 16:53:49.922553796 +0000 UTC m=+1353.336375684" lastFinishedPulling="2025-12-03 16:54:00.282196356 +0000 UTC m=+1363.696018284" observedRunningTime="2025-12-03 16:54:01.483822823 +0000 UTC m=+1364.897644711" watchObservedRunningTime="2025-12-03 16:54:01.486940208 +0000 UTC m=+1364.900762096" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.487996 5002 scope.go:117] "RemoveContainer" containerID="591c738cf7ad4579586313a549e2540266e66d36b59b2db39b029f01c9a8b3a4" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.505872 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.509761 5002 scope.go:117] "RemoveContainer" containerID="f0c2289a41ba9e9402d0dd102555d505c9cd151880c95a9e2b1b52ee262cdb90" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.517443 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.529900 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:01 crc kubenswrapper[5002]: E1203 16:54:01.530709 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-notification-agent" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.530870 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-notification-agent" Dec 03 16:54:01 crc kubenswrapper[5002]: E1203 16:54:01.530966 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="proxy-httpd" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531024 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="proxy-httpd" Dec 03 16:54:01 crc 
kubenswrapper[5002]: E1203 16:54:01.531089 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-central-agent" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531154 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-central-agent" Dec 03 16:54:01 crc kubenswrapper[5002]: E1203 16:54:01.531230 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="sg-core" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531286 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="sg-core" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531562 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="sg-core" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531641 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-central-agent" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531694 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="ceilometer-notification-agent" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.531790 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" containerName="proxy-httpd" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.532875 5002 scope.go:117] "RemoveContainer" containerID="211faa4e1377dc2e6411cea3c505af826123b2ce6b5c336cf12ce43b83bf3272" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.536703 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.540882 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.541619 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.541900 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683022 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mck8w\" (UniqueName: \"kubernetes.io/projected/a505c174-5d4d-41d8-8ec1-040fda970026-kube-api-access-mck8w\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683085 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-scripts\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683112 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-run-httpd\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683159 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683207 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-config-data\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683233 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.683293 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-log-httpd\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.785304 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mck8w\" (UniqueName: \"kubernetes.io/projected/a505c174-5d4d-41d8-8ec1-040fda970026-kube-api-access-mck8w\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: 
I1203 16:54:01.785692 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-scripts\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.785725 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-run-httpd\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.785812 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.785851 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-config-data\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.785890 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.785937 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-log-httpd\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.786541 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-log-httpd\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.786806 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-run-httpd\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.793145 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.794469 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.795244 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-config-data\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.803592 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-scripts\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.818352 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mck8w\" (UniqueName: \"kubernetes.io/projected/a505c174-5d4d-41d8-8ec1-040fda970026-kube-api-access-mck8w\") pod \"ceilometer-0\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " pod="openstack/ceilometer-0" Dec 03 16:54:01 crc kubenswrapper[5002]: I1203 16:54:01.881514 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:02 crc kubenswrapper[5002]: I1203 16:54:02.421623 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:02 crc kubenswrapper[5002]: W1203 16:54:02.433380 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda505c174_5d4d_41d8_8ec1_040fda970026.slice/crio-921393ea0ecf713056852f2639c981f8fac47ac0a4c8052122f093994c086bb8 WatchSource:0}: Error finding container 921393ea0ecf713056852f2639c981f8fac47ac0a4c8052122f093994c086bb8: Status 404 returned error can't find the container with id 921393ea0ecf713056852f2639c981f8fac47ac0a4c8052122f093994c086bb8 Dec 03 16:54:02 crc kubenswrapper[5002]: I1203 16:54:02.478612 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerStarted","Data":"921393ea0ecf713056852f2639c981f8fac47ac0a4c8052122f093994c086bb8"} Dec 03 16:54:02 crc kubenswrapper[5002]: I1203 16:54:02.854569 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c53a201f-6aca-4a6c-a51e-212e7a9f1c64" path="/var/lib/kubelet/pods/c53a201f-6aca-4a6c-a51e-212e7a9f1c64/volumes" Dec 03 16:54:03 crc kubenswrapper[5002]: I1203 16:54:03.494877 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerStarted","Data":"c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64"} Dec 03 16:54:04 crc kubenswrapper[5002]: I1203 16:54:04.514486 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerStarted","Data":"b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec"} Dec 03 16:54:04 crc kubenswrapper[5002]: I1203 16:54:04.515244 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerStarted","Data":"d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28"} Dec 03 16:54:06 crc kubenswrapper[5002]: I1203 16:54:06.534454 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerStarted","Data":"22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034"} Dec 03 
16:54:06 crc kubenswrapper[5002]: I1203 16:54:06.534966 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:54:06 crc kubenswrapper[5002]: I1203 16:54:06.572157 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9200729330000001 podStartE2EDuration="5.572131238s" podCreationTimestamp="2025-12-03 16:54:01 +0000 UTC" firstStartedPulling="2025-12-03 16:54:02.436843338 +0000 UTC m=+1365.850665226" lastFinishedPulling="2025-12-03 16:54:06.088901613 +0000 UTC m=+1369.502723531" observedRunningTime="2025-12-03 16:54:06.562554508 +0000 UTC m=+1369.976376436" watchObservedRunningTime="2025-12-03 16:54:06.572131238 +0000 UTC m=+1369.985953166" Dec 03 16:54:11 crc kubenswrapper[5002]: I1203 16:54:11.590996 5002 generic.go:334] "Generic (PLEG): container finished" podID="7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" containerID="be0b486777dc09b33af931a7bba8f0f3a49cbd01154dc708e1b7333dca4661eb" exitCode=0 Dec 03 16:54:11 crc kubenswrapper[5002]: I1203 16:54:11.591083 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jrk64" event={"ID":"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88","Type":"ContainerDied","Data":"be0b486777dc09b33af931a7bba8f0f3a49cbd01154dc708e1b7333dca4661eb"} Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.021478 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.182455 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-scripts\") pod \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.183146 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-config-data\") pod \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.183300 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsp9t\" (UniqueName: \"kubernetes.io/projected/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-kube-api-access-hsp9t\") pod \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.183334 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-combined-ca-bundle\") pod \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\" (UID: \"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88\") " Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.190275 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-scripts" (OuterVolumeSpecName: "scripts") pod "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" (UID: "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.204204 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-kube-api-access-hsp9t" (OuterVolumeSpecName: "kube-api-access-hsp9t") pod "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" (UID: "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88"). InnerVolumeSpecName "kube-api-access-hsp9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.216384 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" (UID: "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.237326 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-config-data" (OuterVolumeSpecName: "config-data") pod "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" (UID: "7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.286460 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.286570 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.286593 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsp9t\" (UniqueName: \"kubernetes.io/projected/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-kube-api-access-hsp9t\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.286614 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.616312 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jrk64" event={"ID":"7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88","Type":"ContainerDied","Data":"1e31bd88e8cdb0178b61416a0ea292d35e94bb71c319d3563a625eff63c6b0fa"} Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.616392 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e31bd88e8cdb0178b61416a0ea292d35e94bb71c319d3563a625eff63c6b0fa" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.616412 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jrk64" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.835915 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:54:13 crc kubenswrapper[5002]: E1203 16:54:13.836530 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" containerName="nova-cell0-conductor-db-sync" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.836559 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" containerName="nova-cell0-conductor-db-sync" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.836958 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" containerName="nova-cell0-conductor-db-sync" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.837960 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.841018 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lpd6h" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.841048 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 16:54:13 crc kubenswrapper[5002]: I1203 16:54:13.851080 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.010727 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.010839 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7jrt\" (UniqueName: \"kubernetes.io/projected/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-kube-api-access-s7jrt\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.010889 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.112958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.113149 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 
16:54:14.113209 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7jrt\" (UniqueName: \"kubernetes.io/projected/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-kube-api-access-s7jrt\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.118651 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.128046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.144008 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7jrt\" (UniqueName: \"kubernetes.io/projected/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-kube-api-access-s7jrt\") pod \"nova-cell0-conductor-0\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.176471 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:14 crc kubenswrapper[5002]: I1203 16:54:14.677842 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:54:15 crc kubenswrapper[5002]: I1203 16:54:15.664428 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7","Type":"ContainerStarted","Data":"fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37"} Dec 03 16:54:15 crc kubenswrapper[5002]: I1203 16:54:15.664938 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:15 crc kubenswrapper[5002]: I1203 16:54:15.664966 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7","Type":"ContainerStarted","Data":"66f67ba3873bdf4919add8f6b9e00a994f2feb3122f9ada2e6fc4a7f619ff451"} Dec 03 16:54:15 crc kubenswrapper[5002]: I1203 16:54:15.689469 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.689442002 podStartE2EDuration="2.689442002s" podCreationTimestamp="2025-12-03 16:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:15.685045252 +0000 UTC m=+1379.098867240" watchObservedRunningTime="2025-12-03 16:54:15.689442002 +0000 UTC m=+1379.103263890" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.226765 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.774498 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-cfw2x"] Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.776414 
5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.778729 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.778965 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.797546 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cfw2x"] Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.946802 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.948691 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.952515 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.953046 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntsmk\" (UniqueName: \"kubernetes.io/projected/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-kube-api-access-ntsmk\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.953087 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.953108 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-config-data\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.953209 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-scripts\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:19 crc kubenswrapper[5002]: I1203 16:54:19.962268 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.042409 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.043982 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.052294 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070016 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070100 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-config-data\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070134 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-config-data\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070151 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-logs\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070165 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b4fh\" (UniqueName: \"kubernetes.io/projected/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-kube-api-access-9b4fh\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070193 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070216 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-scripts\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070249 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6203f8d5-2c93-46b0-8e3b-151330a87e92-logs\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070285 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntsmk\" (UniqueName: \"kubernetes.io/projected/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-kube-api-access-ntsmk\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: 
\"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070308 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070324 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwrsm\" (UniqueName: \"kubernetes.io/projected/6203f8d5-2c93-46b0-8e3b-151330a87e92-kube-api-access-qwrsm\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.070344 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-config-data\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.073483 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.081548 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-scripts\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.085774 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-config-data\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.091166 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.160685 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntsmk\" (UniqueName: \"kubernetes.io/projected/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-kube-api-access-ntsmk\") pod \"nova-cell0-cell-mapping-cfw2x\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.173862 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-config-data\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.173938 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-config-data\") pod \"nova-api-0\" 
(UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.173960 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-logs\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.173977 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b4fh\" (UniqueName: \"kubernetes.io/projected/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-kube-api-access-9b4fh\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.174004 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.174044 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6203f8d5-2c93-46b0-8e3b-151330a87e92-logs\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.174089 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwrsm\" (UniqueName: \"kubernetes.io/projected/6203f8d5-2c93-46b0-8e3b-151330a87e92-kube-api-access-qwrsm\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.174147 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.180325 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6203f8d5-2c93-46b0-8e3b-151330a87e92-logs\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.204055 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-logs\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.205197 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-config-data\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.208517 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " 
pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.214571 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-config-data\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.214700 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c4475fdfc-pv642"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.215622 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.225734 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.235624 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.237333 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.240449 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwrsm\" (UniqueName: \"kubernetes.io/projected/6203f8d5-2c93-46b0-8e3b-151330a87e92-kube-api-access-qwrsm\") pod \"nova-api-0\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.241002 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.254436 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b4fh\" (UniqueName: \"kubernetes.io/projected/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-kube-api-access-9b4fh\") pod \"nova-metadata-0\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.277481 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.294075 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.300027 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c4475fdfc-pv642"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.314066 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.315353 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.317825 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.339436 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.365955 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.394811 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.394857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.394899 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.394966 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djhx8\" (UniqueName: \"kubernetes.io/projected/7ca5d77b-a995-41c0-ba36-54a896f63a7a-kube-api-access-djhx8\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.395001 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-svc\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.395025 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.395047 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gwgp\" (UniqueName: \"kubernetes.io/projected/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-kube-api-access-8gwgp\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.395067 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-config\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.395089 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.396834 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.496943 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497387 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497427 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497492 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-config-data\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497522 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djhx8\" (UniqueName: \"kubernetes.io/projected/7ca5d77b-a995-41c0-ba36-54a896f63a7a-kube-api-access-djhx8\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497557 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-svc\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497644 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " 
pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497796 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gwgp\" (UniqueName: \"kubernetes.io/projected/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-kube-api-access-8gwgp\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497848 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-config\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497906 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.497971 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p97rk\" (UniqueName: \"kubernetes.io/projected/91331d30-b807-4f96-b1c6-4ca6be2b36bc-kube-api-access-p97rk\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.498030 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.498954 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.499547 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.499937 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-config\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.501916 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.502785 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-svc\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.507918 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.518038 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.521718 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djhx8\" (UniqueName: \"kubernetes.io/projected/7ca5d77b-a995-41c0-ba36-54a896f63a7a-kube-api-access-djhx8\") pod \"dnsmasq-dns-5c4475fdfc-pv642\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.526266 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gwgp\" (UniqueName: \"kubernetes.io/projected/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-kube-api-access-8gwgp\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.600335 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p97rk\" (UniqueName: \"kubernetes.io/projected/91331d30-b807-4f96-b1c6-4ca6be2b36bc-kube-api-access-p97rk\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.600468 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.600536 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-config-data\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.612442 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.612722 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-config-data\") pod \"nova-scheduler-0\" (UID: 
\"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.621151 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p97rk\" (UniqueName: \"kubernetes.io/projected/91331d30-b807-4f96-b1c6-4ca6be2b36bc-kube-api-access-p97rk\") pod \"nova-scheduler-0\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.634912 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.659327 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.678822 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.824216 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: W1203 16:54:20.829717 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6203f8d5_2c93_46b0_8e3b_151330a87e92.slice/crio-8cd921deb73bf99e14d047a1fa70c4ad06d6b835332dd939276a201621cd7be2 WatchSource:0}: Error finding container 8cd921deb73bf99e14d047a1fa70c4ad06d6b835332dd939276a201621cd7be2: Status 404 returned error can't find the container with id 8cd921deb73bf99e14d047a1fa70c4ad06d6b835332dd939276a201621cd7be2 Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.913566 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.931519 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zspw"] Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.934854 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.936892 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.937466 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 16:54:20 crc kubenswrapper[5002]: I1203 16:54:20.949247 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zspw"] Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.060253 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cfw2x"] Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.128657 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-scripts\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.128724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m27t\" (UniqueName: \"kubernetes.io/projected/b405bac2-d301-4132-bd82-a1c7d0b0df6c-kube-api-access-6m27t\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.128805 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-config-data\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.128838 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.157618 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.176490 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c4475fdfc-pv642"] Dec 03 16:54:21 crc kubenswrapper[5002]: W1203 16:54:21.179595 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ca5d77b_a995_41c0_ba36_54a896f63a7a.slice/crio-7755eff49c96a2e5b4f8d12a176dd46291a19867da56e49f826ce49b38eac8d7 WatchSource:0}: Error finding container 7755eff49c96a2e5b4f8d12a176dd46291a19867da56e49f826ce49b38eac8d7: Status 404 returned error can't find the container with id 7755eff49c96a2e5b4f8d12a176dd46291a19867da56e49f826ce49b38eac8d7 Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.230961 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-scripts\") pod 
\"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.231281 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m27t\" (UniqueName: \"kubernetes.io/projected/b405bac2-d301-4132-bd82-a1c7d0b0df6c-kube-api-access-6m27t\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.231345 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-config-data\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.231374 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.235119 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-scripts\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.236327 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.237324 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-config-data\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.248095 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m27t\" (UniqueName: \"kubernetes.io/projected/b405bac2-d301-4132-bd82-a1c7d0b0df6c-kube-api-access-6m27t\") pod \"nova-cell1-conductor-db-sync-2zspw\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.272388 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.400078 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.731225 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cfw2x" event={"ID":"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b","Type":"ContainerStarted","Data":"c5d0ede319ccf188efb27569b868b26ca5f84ea64bed7ce6823e0f0aa90d0477"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.731867 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cfw2x" event={"ID":"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b","Type":"ContainerStarted","Data":"3e9d692b43ed305412343553b0de4f6b8fc0fa3dcc31dd4699d2374e50937476"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.734974 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6203f8d5-2c93-46b0-8e3b-151330a87e92","Type":"ContainerStarted","Data":"8cd921deb73bf99e14d047a1fa70c4ad06d6b835332dd939276a201621cd7be2"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.738395 5002 generic.go:334] "Generic (PLEG): container finished" podID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerID="71eeef36c6f1dadc59d3a781bfcbfbd8460dc45c062387e637b722d598dd4705" exitCode=0 Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.739358 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" event={"ID":"7ca5d77b-a995-41c0-ba36-54a896f63a7a","Type":"ContainerDied","Data":"71eeef36c6f1dadc59d3a781bfcbfbd8460dc45c062387e637b722d598dd4705"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.739392 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" event={"ID":"7ca5d77b-a995-41c0-ba36-54a896f63a7a","Type":"ContainerStarted","Data":"7755eff49c96a2e5b4f8d12a176dd46291a19867da56e49f826ce49b38eac8d7"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.746560 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb","Type":"ContainerStarted","Data":"8db8880184a64222899c81c3c8b536bd444ca369f57504c0ee51b10a30cc65c0"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.756868 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ab82e179-d6cc-4942-9eb9-9b307cdaaff0","Type":"ContainerStarted","Data":"370dffc6fef6f2275b437bab9278fe2365871f426207277ea17b6e8c1f7420b2"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.761543 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"91331d30-b807-4f96-b1c6-4ca6be2b36bc","Type":"ContainerStarted","Data":"54d9a78f3c8bbfd2b3d879e2c76ae607f30e6ce331df0779314980339a07d3a1"} Dec 03 16:54:21 crc kubenswrapper[5002]: I1203 16:54:21.764780 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-cfw2x" podStartSLOduration=2.764763525 podStartE2EDuration="2.764763525s" podCreationTimestamp="2025-12-03 16:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:21.756971184 +0000 UTC m=+1385.170793072" watchObservedRunningTime="2025-12-03 16:54:21.764763525 +0000 UTC m=+1385.178585423" Dec 03 16:54:21 crc 
kubenswrapper[5002]: I1203 16:54:21.929361 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zspw"] Dec 03 16:54:21 crc kubenswrapper[5002]: W1203 16:54:21.958152 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb405bac2_d301_4132_bd82_a1c7d0b0df6c.slice/crio-035619c9637e2473727f992503f3818e69d88bee38d89b51ed14cc084f16ad5d WatchSource:0}: Error finding container 035619c9637e2473727f992503f3818e69d88bee38d89b51ed14cc084f16ad5d: Status 404 returned error can't find the container with id 035619c9637e2473727f992503f3818e69d88bee38d89b51ed14cc084f16ad5d Dec 03 16:54:22 crc kubenswrapper[5002]: I1203 16:54:22.794431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zspw" event={"ID":"b405bac2-d301-4132-bd82-a1c7d0b0df6c","Type":"ContainerStarted","Data":"c572cd9c1af6f40e03b071df88c96ebaa895f94c1cd6af90c6351a04eb304599"} Dec 03 16:54:22 crc kubenswrapper[5002]: I1203 16:54:22.794940 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zspw" event={"ID":"b405bac2-d301-4132-bd82-a1c7d0b0df6c","Type":"ContainerStarted","Data":"035619c9637e2473727f992503f3818e69d88bee38d89b51ed14cc084f16ad5d"} Dec 03 16:54:22 crc kubenswrapper[5002]: I1203 16:54:22.801547 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" event={"ID":"7ca5d77b-a995-41c0-ba36-54a896f63a7a","Type":"ContainerStarted","Data":"494ae1cdce3c8bc209ceb1c212726454d0a091e15dcb7b7aecd3e12a2e90df3e"} Dec 03 16:54:22 crc kubenswrapper[5002]: I1203 16:54:22.801715 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:22 crc kubenswrapper[5002]: I1203 16:54:22.820285 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-2zspw" podStartSLOduration=2.820265348 podStartE2EDuration="2.820265348s" podCreationTimestamp="2025-12-03 16:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:22.817853112 +0000 UTC m=+1386.231675020" watchObservedRunningTime="2025-12-03 16:54:22.820265348 +0000 UTC m=+1386.234087236" Dec 03 16:54:22 crc kubenswrapper[5002]: I1203 16:54:22.850318 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" podStartSLOduration=2.850298154 podStartE2EDuration="2.850298154s" podCreationTimestamp="2025-12-03 16:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:22.8457134 +0000 UTC m=+1386.259535288" watchObservedRunningTime="2025-12-03 16:54:22.850298154 +0000 UTC m=+1386.264120042" Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.092836 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.104732 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.831436 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"ab82e179-d6cc-4942-9eb9-9b307cdaaff0","Type":"ContainerStarted","Data":"4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca"} Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.831565 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="ab82e179-d6cc-4942-9eb9-9b307cdaaff0" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca" gracePeriod=30 Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.837629 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"91331d30-b807-4f96-b1c6-4ca6be2b36bc","Type":"ContainerStarted","Data":"92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219"} Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.850515 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-metadata" containerID="cri-o://990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336" gracePeriod=30 Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.850527 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-log" containerID="cri-o://49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0" gracePeriod=30 Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.861644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6203f8d5-2c93-46b0-8e3b-151330a87e92","Type":"ContainerStarted","Data":"1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099"} Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.861723 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6203f8d5-2c93-46b0-8e3b-151330a87e92","Type":"ContainerStarted","Data":"ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b"} Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.861769 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb","Type":"ContainerStarted","Data":"990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336"} Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.861791 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb","Type":"ContainerStarted","Data":"49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0"} Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.876713 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.924131831 podStartE2EDuration="4.876684708s" podCreationTimestamp="2025-12-03 16:54:20 +0000 UTC" firstStartedPulling="2025-12-03 16:54:21.172693049 +0000 UTC m=+1384.586514937" lastFinishedPulling="2025-12-03 16:54:24.125245926 +0000 UTC m=+1387.539067814" observedRunningTime="2025-12-03 16:54:24.860907138 +0000 UTC m=+1388.274729026" watchObservedRunningTime="2025-12-03 16:54:24.876684708 +0000 UTC m=+1388.290506616" Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.882772 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.582690242 
podStartE2EDuration="5.882738242s" podCreationTimestamp="2025-12-03 16:54:19 +0000 UTC" firstStartedPulling="2025-12-03 16:54:20.832008802 +0000 UTC m=+1384.245830690" lastFinishedPulling="2025-12-03 16:54:24.132056792 +0000 UTC m=+1387.545878690" observedRunningTime="2025-12-03 16:54:24.8782248 +0000 UTC m=+1388.292046718" watchObservedRunningTime="2025-12-03 16:54:24.882738242 +0000 UTC m=+1388.296560130" Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.897656 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.702542333 podStartE2EDuration="4.897637178s" podCreationTimestamp="2025-12-03 16:54:20 +0000 UTC" firstStartedPulling="2025-12-03 16:54:20.930924273 +0000 UTC m=+1384.344746171" lastFinishedPulling="2025-12-03 16:54:24.126019128 +0000 UTC m=+1387.539841016" observedRunningTime="2025-12-03 16:54:24.895641713 +0000 UTC m=+1388.309463621" watchObservedRunningTime="2025-12-03 16:54:24.897637178 +0000 UTC m=+1388.311459066" Dec 03 16:54:24 crc kubenswrapper[5002]: I1203 16:54:24.923357 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.294155767 podStartE2EDuration="4.923336277s" podCreationTimestamp="2025-12-03 16:54:20 +0000 UTC" firstStartedPulling="2025-12-03 16:54:21.496895439 +0000 UTC m=+1384.910717327" lastFinishedPulling="2025-12-03 16:54:24.126075949 +0000 UTC m=+1387.539897837" observedRunningTime="2025-12-03 16:54:24.919364478 +0000 UTC m=+1388.333186376" watchObservedRunningTime="2025-12-03 16:54:24.923336277 +0000 UTC m=+1388.337158165" Dec 03 16:54:25 crc kubenswrapper[5002]: I1203 16:54:25.367683 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:54:25 crc kubenswrapper[5002]: I1203 16:54:25.369323 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:54:25 crc kubenswrapper[5002]: I1203 16:54:25.660677 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:25 crc kubenswrapper[5002]: I1203 16:54:25.679952 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 16:54:25 crc kubenswrapper[5002]: I1203 16:54:25.863955 5002 generic.go:334] "Generic (PLEG): container finished" podID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerID="49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0" exitCode=143 Dec 03 16:54:25 crc kubenswrapper[5002]: I1203 16:54:25.864002 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb","Type":"ContainerDied","Data":"49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0"} Dec 03 16:54:28 crc kubenswrapper[5002]: I1203 16:54:28.901725 5002 generic.go:334] "Generic (PLEG): container finished" podID="db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" containerID="c5d0ede319ccf188efb27569b868b26ca5f84ea64bed7ce6823e0f0aa90d0477" exitCode=0 Dec 03 16:54:28 crc kubenswrapper[5002]: I1203 16:54:28.901777 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cfw2x" event={"ID":"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b","Type":"ContainerDied","Data":"c5d0ede319ccf188efb27569b868b26ca5f84ea64bed7ce6823e0f0aa90d0477"} Dec 03 16:54:29 crc kubenswrapper[5002]: I1203 16:54:29.922147 5002 generic.go:334] "Generic (PLEG): container finished" 
podID="b405bac2-d301-4132-bd82-a1c7d0b0df6c" containerID="c572cd9c1af6f40e03b071df88c96ebaa895f94c1cd6af90c6351a04eb304599" exitCode=0 Dec 03 16:54:29 crc kubenswrapper[5002]: I1203 16:54:29.922266 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zspw" event={"ID":"b405bac2-d301-4132-bd82-a1c7d0b0df6c","Type":"ContainerDied","Data":"c572cd9c1af6f40e03b071df88c96ebaa895f94c1cd6af90c6351a04eb304599"} Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.294683 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.294760 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.376692 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.545401 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-combined-ca-bundle\") pod \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.546083 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-scripts\") pod \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.546246 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-config-data\") pod \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.546898 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntsmk\" (UniqueName: \"kubernetes.io/projected/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-kube-api-access-ntsmk\") pod \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\" (UID: \"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b\") " Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.554802 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-kube-api-access-ntsmk" (OuterVolumeSpecName: "kube-api-access-ntsmk") pod "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" (UID: "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b"). InnerVolumeSpecName "kube-api-access-ntsmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.555225 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-scripts" (OuterVolumeSpecName: "scripts") pod "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" (UID: "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.593357 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-config-data" (OuterVolumeSpecName: "config-data") pod "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" (UID: "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.596445 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" (UID: "db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.635875 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.649737 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.649791 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.649802 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.649811 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntsmk\" (UniqueName: \"kubernetes.io/projected/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b-kube-api-access-ntsmk\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.679565 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.730656 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c77d8b67c-bpckg"] Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.730919 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerName="dnsmasq-dns" containerID="cri-o://c41e19901492ef72054fcae791fadc714779ad1c6de3ffc8143c85fb719d7d7c" gracePeriod=10 Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.764939 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.946411 5002 generic.go:334] "Generic (PLEG): container finished" podID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerID="c41e19901492ef72054fcae791fadc714779ad1c6de3ffc8143c85fb719d7d7c" exitCode=0 Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.946544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" 
event={"ID":"abbcb731-b955-4e74-98e3-1ddb1db21986","Type":"ContainerDied","Data":"c41e19901492ef72054fcae791fadc714779ad1c6de3ffc8143c85fb719d7d7c"} Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.951063 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cfw2x" event={"ID":"db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b","Type":"ContainerDied","Data":"3e9d692b43ed305412343553b0de4f6b8fc0fa3dcc31dd4699d2374e50937476"} Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.951099 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e9d692b43ed305412343553b0de4f6b8fc0fa3dcc31dd4699d2374e50937476" Dec 03 16:54:30 crc kubenswrapper[5002]: I1203 16:54:30.951162 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cfw2x" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.014389 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.200297 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.200941 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-api" containerID="cri-o://1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099" gracePeriod=30 Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.200948 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-log" containerID="cri-o://ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b" gracePeriod=30 Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.209116 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": EOF" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.209265 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": EOF" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.446222 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.459063 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.577142 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.609527 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m27t\" (UniqueName: \"kubernetes.io/projected/b405bac2-d301-4132-bd82-a1c7d0b0df6c-kube-api-access-6m27t\") pod \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.609627 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-svc\") pod \"abbcb731-b955-4e74-98e3-1ddb1db21986\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.609717 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-nb\") pod \"abbcb731-b955-4e74-98e3-1ddb1db21986\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.609778 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-swift-storage-0\") pod \"abbcb731-b955-4e74-98e3-1ddb1db21986\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.609802 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-scripts\") pod \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.609849 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-sb\") pod \"abbcb731-b955-4e74-98e3-1ddb1db21986\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.610467 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config\") pod \"abbcb731-b955-4e74-98e3-1ddb1db21986\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.610507 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjjvg\" (UniqueName: \"kubernetes.io/projected/abbcb731-b955-4e74-98e3-1ddb1db21986-kube-api-access-rjjvg\") pod \"abbcb731-b955-4e74-98e3-1ddb1db21986\" (UID: \"abbcb731-b955-4e74-98e3-1ddb1db21986\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.610547 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-config-data\") pod \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.610567 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-combined-ca-bundle\") pod \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\" (UID: \"b405bac2-d301-4132-bd82-a1c7d0b0df6c\") " Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.622019 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-scripts" (OuterVolumeSpecName: "scripts") pod "b405bac2-d301-4132-bd82-a1c7d0b0df6c" (UID: "b405bac2-d301-4132-bd82-a1c7d0b0df6c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.623036 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abbcb731-b955-4e74-98e3-1ddb1db21986-kube-api-access-rjjvg" (OuterVolumeSpecName: "kube-api-access-rjjvg") pod "abbcb731-b955-4e74-98e3-1ddb1db21986" (UID: "abbcb731-b955-4e74-98e3-1ddb1db21986"). InnerVolumeSpecName "kube-api-access-rjjvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.636021 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b405bac2-d301-4132-bd82-a1c7d0b0df6c-kube-api-access-6m27t" (OuterVolumeSpecName: "kube-api-access-6m27t") pod "b405bac2-d301-4132-bd82-a1c7d0b0df6c" (UID: "b405bac2-d301-4132-bd82-a1c7d0b0df6c"). InnerVolumeSpecName "kube-api-access-6m27t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.691848 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-config-data" (OuterVolumeSpecName: "config-data") pod "b405bac2-d301-4132-bd82-a1c7d0b0df6c" (UID: "b405bac2-d301-4132-bd82-a1c7d0b0df6c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.695528 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "abbcb731-b955-4e74-98e3-1ddb1db21986" (UID: "abbcb731-b955-4e74-98e3-1ddb1db21986"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.703243 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "abbcb731-b955-4e74-98e3-1ddb1db21986" (UID: "abbcb731-b955-4e74-98e3-1ddb1db21986"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.703366 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "abbcb731-b955-4e74-98e3-1ddb1db21986" (UID: "abbcb731-b955-4e74-98e3-1ddb1db21986"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714235 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b405bac2-d301-4132-bd82-a1c7d0b0df6c" (UID: "b405bac2-d301-4132-bd82-a1c7d0b0df6c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714381 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714394 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714405 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714417 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjjvg\" (UniqueName: \"kubernetes.io/projected/abbcb731-b955-4e74-98e3-1ddb1db21986-kube-api-access-rjjvg\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714429 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714436 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b405bac2-d301-4132-bd82-a1c7d0b0df6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714445 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m27t\" (UniqueName: \"kubernetes.io/projected/b405bac2-d301-4132-bd82-a1c7d0b0df6c-kube-api-access-6m27t\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.714454 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.724433 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config" (OuterVolumeSpecName: "config") pod "abbcb731-b955-4e74-98e3-1ddb1db21986" (UID: "abbcb731-b955-4e74-98e3-1ddb1db21986"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.736217 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "abbcb731-b955-4e74-98e3-1ddb1db21986" (UID: "abbcb731-b955-4e74-98e3-1ddb1db21986"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.815545 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.815586 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abbcb731-b955-4e74-98e3-1ddb1db21986-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.947795 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.966803 5002 generic.go:334] "Generic (PLEG): container finished" podID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerID="ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b" exitCode=143 Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.966875 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6203f8d5-2c93-46b0-8e3b-151330a87e92","Type":"ContainerDied","Data":"ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b"} Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.969479 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zspw" event={"ID":"b405bac2-d301-4132-bd82-a1c7d0b0df6c","Type":"ContainerDied","Data":"035619c9637e2473727f992503f3818e69d88bee38d89b51ed14cc084f16ad5d"} Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.969505 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="035619c9637e2473727f992503f3818e69d88bee38d89b51ed14cc084f16ad5d" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.969558 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zspw" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.975184 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.975462 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c77d8b67c-bpckg" event={"ID":"abbcb731-b955-4e74-98e3-1ddb1db21986","Type":"ContainerDied","Data":"00442de7db9c9528550efc46b221d466e288ca8624e7e91d5c9fb65df8b50b2b"} Dec 03 16:54:31 crc kubenswrapper[5002]: I1203 16:54:31.975500 5002 scope.go:117] "RemoveContainer" containerID="c41e19901492ef72054fcae791fadc714779ad1c6de3ffc8143c85fb719d7d7c" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.032101 5002 scope.go:117] "RemoveContainer" containerID="259f208880e1cc53be0e2e5cb718c302f0e48e2958135f7b69b3439b59bb0c0f" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.045168 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c77d8b67c-bpckg"] Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.059190 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c77d8b67c-bpckg"] Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.095033 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:54:32 crc kubenswrapper[5002]: E1203 16:54:32.099390 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerName="dnsmasq-dns" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.099520 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerName="dnsmasq-dns" Dec 03 16:54:32 crc kubenswrapper[5002]: E1203 16:54:32.099588 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b405bac2-d301-4132-bd82-a1c7d0b0df6c" containerName="nova-cell1-conductor-db-sync" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.099643 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b405bac2-d301-4132-bd82-a1c7d0b0df6c" containerName="nova-cell1-conductor-db-sync" Dec 03 16:54:32 crc kubenswrapper[5002]: E1203 16:54:32.099714 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" containerName="nova-manage" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.099792 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" containerName="nova-manage" Dec 03 16:54:32 crc kubenswrapper[5002]: E1203 16:54:32.099854 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerName="init" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.099907 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerName="init" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.100702 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b405bac2-d301-4132-bd82-a1c7d0b0df6c" containerName="nova-cell1-conductor-db-sync" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.100834 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" containerName="dnsmasq-dns" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.100902 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" containerName="nova-manage" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.101629 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.105388 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.114909 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.131883 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.132113 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.132155 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t5sh\" (UniqueName: \"kubernetes.io/projected/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-kube-api-access-2t5sh\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.235871 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.236881 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.237090 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t5sh\" (UniqueName: \"kubernetes.io/projected/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-kube-api-access-2t5sh\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.242284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.242454 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.254830 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t5sh\" (UniqueName: \"kubernetes.io/projected/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-kube-api-access-2t5sh\") pod \"nova-cell1-conductor-0\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.438011 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.852782 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abbcb731-b955-4e74-98e3-1ddb1db21986" path="/var/lib/kubelet/pods/abbcb731-b955-4e74-98e3-1ddb1db21986/volumes" Dec 03 16:54:32 crc kubenswrapper[5002]: I1203 16:54:32.944464 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:54:33 crc kubenswrapper[5002]: I1203 16:54:33.007106 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5a3af9fa-d550-4d97-8d54-b198f0ca6f31","Type":"ContainerStarted","Data":"c97b9d8430bb2816df2d936d883512f1eeee34e66c6ab2013595c183883c634a"} Dec 03 16:54:33 crc kubenswrapper[5002]: I1203 16:54:33.026529 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" containerName="nova-scheduler-scheduler" containerID="cri-o://92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219" gracePeriod=30 Dec 03 16:54:34 crc kubenswrapper[5002]: I1203 16:54:34.046950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5a3af9fa-d550-4d97-8d54-b198f0ca6f31","Type":"ContainerStarted","Data":"66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948"} Dec 03 16:54:34 crc kubenswrapper[5002]: I1203 16:54:34.047435 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:34 crc kubenswrapper[5002]: I1203 16:54:34.076801 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.076786834 podStartE2EDuration="2.076786834s" podCreationTimestamp="2025-12-03 16:54:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:34.076140076 +0000 UTC m=+1397.489961964" watchObservedRunningTime="2025-12-03 16:54:34.076786834 +0000 UTC m=+1397.490608722" Dec 03 16:54:35 crc kubenswrapper[5002]: E1203 16:54:35.681613 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:54:35 crc kubenswrapper[5002]: E1203 16:54:35.683785 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:54:35 crc kubenswrapper[5002]: E1203 16:54:35.686673 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown 
desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:54:35 crc kubenswrapper[5002]: E1203 16:54:35.686791 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" containerName="nova-scheduler-scheduler" Dec 03 16:54:35 crc kubenswrapper[5002]: I1203 16:54:35.782169 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:54:35 crc kubenswrapper[5002]: I1203 16:54:35.782377 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="84ace4a6-f88f-4b88-8bd9-62440a00df18" containerName="kube-state-metrics" containerID="cri-o://9c56e5ddb6de72464477015888e8cbde28a224dd4bd4a8110ce4845a4f8af4d1" gracePeriod=30 Dec 03 16:54:35 crc kubenswrapper[5002]: I1203 16:54:35.797558 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="84ace4a6-f88f-4b88-8bd9-62440a00df18" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.107:8081/readyz\": dial tcp 10.217.0.107:8081: connect: connection refused" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.089034 5002 generic.go:334] "Generic (PLEG): container finished" podID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" containerID="92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219" exitCode=0 Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.089503 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"91331d30-b807-4f96-b1c6-4ca6be2b36bc","Type":"ContainerDied","Data":"92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219"} Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.091025 5002 generic.go:334] "Generic (PLEG): container finished" podID="84ace4a6-f88f-4b88-8bd9-62440a00df18" containerID="9c56e5ddb6de72464477015888e8cbde28a224dd4bd4a8110ce4845a4f8af4d1" exitCode=2 Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.091049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"84ace4a6-f88f-4b88-8bd9-62440a00df18","Type":"ContainerDied","Data":"9c56e5ddb6de72464477015888e8cbde28a224dd4bd4a8110ce4845a4f8af4d1"} Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.217982 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.316174 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p97rk\" (UniqueName: \"kubernetes.io/projected/91331d30-b807-4f96-b1c6-4ca6be2b36bc-kube-api-access-p97rk\") pod \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.316313 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-combined-ca-bundle\") pod \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.316354 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-config-data\") pod \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\" (UID: \"91331d30-b807-4f96-b1c6-4ca6be2b36bc\") " Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.323351 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91331d30-b807-4f96-b1c6-4ca6be2b36bc-kube-api-access-p97rk" (OuterVolumeSpecName: "kube-api-access-p97rk") pod "91331d30-b807-4f96-b1c6-4ca6be2b36bc" (UID: "91331d30-b807-4f96-b1c6-4ca6be2b36bc"). InnerVolumeSpecName "kube-api-access-p97rk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.335876 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.360869 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-config-data" (OuterVolumeSpecName: "config-data") pod "91331d30-b807-4f96-b1c6-4ca6be2b36bc" (UID: "91331d30-b807-4f96-b1c6-4ca6be2b36bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.381794 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91331d30-b807-4f96-b1c6-4ca6be2b36bc" (UID: "91331d30-b807-4f96-b1c6-4ca6be2b36bc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.418044 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64lnz\" (UniqueName: \"kubernetes.io/projected/84ace4a6-f88f-4b88-8bd9-62440a00df18-kube-api-access-64lnz\") pod \"84ace4a6-f88f-4b88-8bd9-62440a00df18\" (UID: \"84ace4a6-f88f-4b88-8bd9-62440a00df18\") " Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.418612 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.418630 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91331d30-b807-4f96-b1c6-4ca6be2b36bc-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.418638 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p97rk\" (UniqueName: \"kubernetes.io/projected/91331d30-b807-4f96-b1c6-4ca6be2b36bc-kube-api-access-p97rk\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.422719 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ace4a6-f88f-4b88-8bd9-62440a00df18-kube-api-access-64lnz" (OuterVolumeSpecName: "kube-api-access-64lnz") pod "84ace4a6-f88f-4b88-8bd9-62440a00df18" (UID: "84ace4a6-f88f-4b88-8bd9-62440a00df18"). InnerVolumeSpecName "kube-api-access-64lnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:36 crc kubenswrapper[5002]: I1203 16:54:36.520664 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64lnz\" (UniqueName: \"kubernetes.io/projected/84ace4a6-f88f-4b88-8bd9-62440a00df18-kube-api-access-64lnz\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.061408 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.102359 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"91331d30-b807-4f96-b1c6-4ca6be2b36bc","Type":"ContainerDied","Data":"54d9a78f3c8bbfd2b3d879e2c76ae607f30e6ce331df0779314980339a07d3a1"} Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.102412 5002 scope.go:117] "RemoveContainer" containerID="92b4f9a32c605d1e0da36f62dd918f945f078af9e9735e4bc51b3593e507c219" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.102445 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.106488 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.106593 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"84ace4a6-f88f-4b88-8bd9-62440a00df18","Type":"ContainerDied","Data":"15aee18692511b6d9da9d6b3d885f9aa3bdd1bb35685566bd8c1890838ac53b6"} Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.112395 5002 generic.go:334] "Generic (PLEG): container finished" podID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerID="1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099" exitCode=0 Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.112442 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6203f8d5-2c93-46b0-8e3b-151330a87e92","Type":"ContainerDied","Data":"1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099"} Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.112468 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6203f8d5-2c93-46b0-8e3b-151330a87e92","Type":"ContainerDied","Data":"8cd921deb73bf99e14d047a1fa70c4ad06d6b835332dd939276a201621cd7be2"} Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.112517 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.130186 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwrsm\" (UniqueName: \"kubernetes.io/projected/6203f8d5-2c93-46b0-8e3b-151330a87e92-kube-api-access-qwrsm\") pod \"6203f8d5-2c93-46b0-8e3b-151330a87e92\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.174190 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.175150 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6203f8d5-2c93-46b0-8e3b-151330a87e92-kube-api-access-qwrsm" (OuterVolumeSpecName: "kube-api-access-qwrsm") pod "6203f8d5-2c93-46b0-8e3b-151330a87e92" (UID: "6203f8d5-2c93-46b0-8e3b-151330a87e92"). InnerVolumeSpecName "kube-api-access-qwrsm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.175656 5002 scope.go:117] "RemoveContainer" containerID="9c56e5ddb6de72464477015888e8cbde28a224dd4bd4a8110ce4845a4f8af4d1" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.193643 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.205241 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: E1203 16:54:37.205699 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-api" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.205714 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-api" Dec 03 16:54:37 crc kubenswrapper[5002]: E1203 16:54:37.205734 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-log" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.205740 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-log" Dec 03 16:54:37 crc kubenswrapper[5002]: E1203 16:54:37.205777 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ace4a6-f88f-4b88-8bd9-62440a00df18" containerName="kube-state-metrics" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.205784 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ace4a6-f88f-4b88-8bd9-62440a00df18" containerName="kube-state-metrics" Dec 03 16:54:37 crc kubenswrapper[5002]: E1203 16:54:37.205798 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" containerName="nova-scheduler-scheduler" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.205804 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" containerName="nova-scheduler-scheduler" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.205977 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ace4a6-f88f-4b88-8bd9-62440a00df18" containerName="kube-state-metrics" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.206005 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" containerName="nova-scheduler-scheduler" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.206017 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-api" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.206028 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" containerName="nova-api-log" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.206675 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.212438 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.213881 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.225183 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.232464 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-config-data\") pod \"6203f8d5-2c93-46b0-8e3b-151330a87e92\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.232506 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6203f8d5-2c93-46b0-8e3b-151330a87e92-logs\") pod \"6203f8d5-2c93-46b0-8e3b-151330a87e92\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.232550 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-combined-ca-bundle\") pod \"6203f8d5-2c93-46b0-8e3b-151330a87e92\" (UID: \"6203f8d5-2c93-46b0-8e3b-151330a87e92\") " Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.233037 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6203f8d5-2c93-46b0-8e3b-151330a87e92-logs" (OuterVolumeSpecName: "logs") pod "6203f8d5-2c93-46b0-8e3b-151330a87e92" (UID: "6203f8d5-2c93-46b0-8e3b-151330a87e92"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.233493 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.233548 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwrsm\" (UniqueName: \"kubernetes.io/projected/6203f8d5-2c93-46b0-8e3b-151330a87e92-kube-api-access-qwrsm\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.233714 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6203f8d5-2c93-46b0-8e3b-151330a87e92-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.242391 5002 scope.go:117] "RemoveContainer" containerID="1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.246770 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.248198 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.252327 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.252549 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.259092 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.266264 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6203f8d5-2c93-46b0-8e3b-151330a87e92" (UID: "6203f8d5-2c93-46b0-8e3b-151330a87e92"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.272774 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-config-data" (OuterVolumeSpecName: "config-data") pod "6203f8d5-2c93-46b0-8e3b-151330a87e92" (UID: "6203f8d5-2c93-46b0-8e3b-151330a87e92"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.278842 5002 scope.go:117] "RemoveContainer" containerID="ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.324445 5002 scope.go:117] "RemoveContainer" containerID="1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099" Dec 03 16:54:37 crc kubenswrapper[5002]: E1203 16:54:37.324796 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099\": container with ID starting with 1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099 not found: ID does not exist" containerID="1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.324824 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099"} err="failed to get container status \"1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099\": rpc error: code = NotFound desc = could not find container \"1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099\": container with ID starting with 1549d12234cfa46d3bd087ea9e0d901fa411cef9e6db3af3efe544b02e6df099 not found: ID does not exist" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.324846 5002 scope.go:117] "RemoveContainer" containerID="ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b" Dec 03 16:54:37 crc kubenswrapper[5002]: E1203 16:54:37.325066 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b\": container with ID starting with ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b not found: ID does not exist" containerID="ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 
16:54:37.325081 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b"} err="failed to get container status \"ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b\": rpc error: code = NotFound desc = could not find container \"ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b\": container with ID starting with ba937dbd9a7f0b52a93164a137648dc76a5be0703839c471963fbd00ee9c285b not found: ID does not exist" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.335385 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-config-data\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.335538 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9kbt\" (UniqueName: \"kubernetes.io/projected/e7fdf904-293d-4be8-a8ea-01aba8494aa5-kube-api-access-p9kbt\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.335590 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.335733 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.335819 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6203f8d5-2c93-46b0-8e3b-151330a87e92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.437726 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9kbt\" (UniqueName: \"kubernetes.io/projected/e7fdf904-293d-4be8-a8ea-01aba8494aa5-kube-api-access-p9kbt\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.437899 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.438019 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.438099 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-2nxsv\" (UniqueName: \"kubernetes.io/projected/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-api-access-2nxsv\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.438205 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-config-data\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.438334 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.438413 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.443446 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-config-data\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.444006 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.458109 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.461376 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9kbt\" (UniqueName: \"kubernetes.io/projected/e7fdf904-293d-4be8-a8ea-01aba8494aa5-kube-api-access-p9kbt\") pod \"nova-scheduler-0\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.471137 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.482711 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.485563 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.489507 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.502167 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.535681 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540446 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540526 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nxsv\" (UniqueName: \"kubernetes.io/projected/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-api-access-2nxsv\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-config-data\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540641 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c96d8154-fb63-48a7-bcdd-257582c6e458-logs\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540706 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540743 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.540859 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfslq\" (UniqueName: \"kubernetes.io/projected/c96d8154-fb63-48a7-bcdd-257582c6e458-kube-api-access-gfslq\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.545164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.546859 
5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.547584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.574901 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nxsv\" (UniqueName: \"kubernetes.io/projected/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-api-access-2nxsv\") pod \"kube-state-metrics-0\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.580251 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.580530 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-central-agent" containerID="cri-o://c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64" gracePeriod=30 Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.580823 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="proxy-httpd" containerID="cri-o://22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034" gracePeriod=30 Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.580874 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="sg-core" containerID="cri-o://b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec" gracePeriod=30 Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.580877 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-notification-agent" containerID="cri-o://d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28" gracePeriod=30 Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.619799 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.641958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfslq\" (UniqueName: \"kubernetes.io/projected/c96d8154-fb63-48a7-bcdd-257582c6e458-kube-api-access-gfslq\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.642013 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.642068 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-config-data\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.642118 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c96d8154-fb63-48a7-bcdd-257582c6e458-logs\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.642755 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c96d8154-fb63-48a7-bcdd-257582c6e458-logs\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.645702 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-config-data\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.646353 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.660726 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfslq\" (UniqueName: \"kubernetes.io/projected/c96d8154-fb63-48a7-bcdd-257582c6e458-kube-api-access-gfslq\") pod \"nova-api-0\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " pod="openstack/nova-api-0" Dec 03 16:54:37 crc kubenswrapper[5002]: I1203 16:54:37.815519 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.082007 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:54:38 crc kubenswrapper[5002]: W1203 16:54:38.085686 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7fdf904_293d_4be8_a8ea_01aba8494aa5.slice/crio-30132d31aca3887e47f7869b8050ef84f5ea73786f4176e62073dbbb358aa392 WatchSource:0}: Error finding container 30132d31aca3887e47f7869b8050ef84f5ea73786f4176e62073dbbb358aa392: Status 404 returned error can't find the container with id 30132d31aca3887e47f7869b8050ef84f5ea73786f4176e62073dbbb358aa392 Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.130245 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7fdf904-293d-4be8-a8ea-01aba8494aa5","Type":"ContainerStarted","Data":"30132d31aca3887e47f7869b8050ef84f5ea73786f4176e62073dbbb358aa392"} Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.137328 5002 generic.go:334] "Generic (PLEG): container finished" podID="a505c174-5d4d-41d8-8ec1-040fda970026" containerID="22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034" exitCode=0 Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.137369 5002 generic.go:334] "Generic (PLEG): container finished" podID="a505c174-5d4d-41d8-8ec1-040fda970026" containerID="b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec" exitCode=2 Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.137380 5002 generic.go:334] "Generic (PLEG): container finished" podID="a505c174-5d4d-41d8-8ec1-040fda970026" containerID="c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64" exitCode=0 Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.137405 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerDied","Data":"22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034"} Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.137435 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerDied","Data":"b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec"} Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.137450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerDied","Data":"c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64"} Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.182497 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.274476 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:54:38 crc kubenswrapper[5002]: W1203 16:54:38.278193 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc96d8154_fb63_48a7_bcdd_257582c6e458.slice/crio-b2f19e694f122e85fef3e25c8388d28754c9695d1275ee75b27fe7f4ffffb11f WatchSource:0}: Error finding container b2f19e694f122e85fef3e25c8388d28754c9695d1275ee75b27fe7f4ffffb11f: Status 404 returned error can't find the container with id b2f19e694f122e85fef3e25c8388d28754c9695d1275ee75b27fe7f4ffffb11f Dec 03 
16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.851722 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6203f8d5-2c93-46b0-8e3b-151330a87e92" path="/var/lib/kubelet/pods/6203f8d5-2c93-46b0-8e3b-151330a87e92/volumes" Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.852739 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ace4a6-f88f-4b88-8bd9-62440a00df18" path="/var/lib/kubelet/pods/84ace4a6-f88f-4b88-8bd9-62440a00df18/volumes" Dec 03 16:54:38 crc kubenswrapper[5002]: I1203 16:54:38.853512 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91331d30-b807-4f96-b1c6-4ca6be2b36bc" path="/var/lib/kubelet/pods/91331d30-b807-4f96-b1c6-4ca6be2b36bc/volumes" Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.156264 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7fdf904-293d-4be8-a8ea-01aba8494aa5","Type":"ContainerStarted","Data":"225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596"} Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.160162 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c96d8154-fb63-48a7-bcdd-257582c6e458","Type":"ContainerStarted","Data":"3a6bc44f51ab701f3337d6dc27d69715e9a69555db46a09cf7250349b253be78"} Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.160209 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c96d8154-fb63-48a7-bcdd-257582c6e458","Type":"ContainerStarted","Data":"191ff587e9eab4cb2a56c3199cde5cee6c4e7ea889229f2d9549884f14e893fb"} Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.160228 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c96d8154-fb63-48a7-bcdd-257582c6e458","Type":"ContainerStarted","Data":"b2f19e694f122e85fef3e25c8388d28754c9695d1275ee75b27fe7f4ffffb11f"} Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.163574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bea5a03b-519f-4af4-873a-e5e7f9e8f769","Type":"ContainerStarted","Data":"4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855"} Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.163612 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bea5a03b-519f-4af4-873a-e5e7f9e8f769","Type":"ContainerStarted","Data":"37c8a1a7bef3f640b4ba04007ab8fe123e7dd234e6143f92b901a859c32cfca3"} Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.164101 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.208052 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.208023746 podStartE2EDuration="2.208023746s" podCreationTimestamp="2025-12-03 16:54:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:39.185185945 +0000 UTC m=+1402.599007833" watchObservedRunningTime="2025-12-03 16:54:39.208023746 +0000 UTC m=+1402.621845654" Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.211344 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.211334707 podStartE2EDuration="2.211334707s" podCreationTimestamp="2025-12-03 16:54:37 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:39.209373093 +0000 UTC m=+1402.623194981" watchObservedRunningTime="2025-12-03 16:54:39.211334707 +0000 UTC m=+1402.625156605" Dec 03 16:54:39 crc kubenswrapper[5002]: I1203 16:54:39.236320 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.8935458920000001 podStartE2EDuration="2.236306346s" podCreationTimestamp="2025-12-03 16:54:37 +0000 UTC" firstStartedPulling="2025-12-03 16:54:38.184507964 +0000 UTC m=+1401.598329862" lastFinishedPulling="2025-12-03 16:54:38.527268418 +0000 UTC m=+1401.941090316" observedRunningTime="2025-12-03 16:54:39.225031938 +0000 UTC m=+1402.638853826" watchObservedRunningTime="2025-12-03 16:54:39.236306346 +0000 UTC m=+1402.650128234" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.193682 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.209992 5002 generic.go:334] "Generic (PLEG): container finished" podID="a505c174-5d4d-41d8-8ec1-040fda970026" containerID="d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28" exitCode=0 Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.210107 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.210210 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerDied","Data":"d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28"} Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.210286 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a505c174-5d4d-41d8-8ec1-040fda970026","Type":"ContainerDied","Data":"921393ea0ecf713056852f2639c981f8fac47ac0a4c8052122f093994c086bb8"} Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.210318 5002 scope.go:117] "RemoveContainer" containerID="22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.238660 5002 scope.go:117] "RemoveContainer" containerID="b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.261651 5002 scope.go:117] "RemoveContainer" containerID="d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.281432 5002 scope.go:117] "RemoveContainer" containerID="c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.296521 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-sg-core-conf-yaml\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.296695 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-config-data\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc 
kubenswrapper[5002]: I1203 16:54:40.296983 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-combined-ca-bundle\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.297814 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-log-httpd\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.297982 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mck8w\" (UniqueName: \"kubernetes.io/projected/a505c174-5d4d-41d8-8ec1-040fda970026-kube-api-access-mck8w\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.298077 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-scripts\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.298131 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-run-httpd\") pod \"a505c174-5d4d-41d8-8ec1-040fda970026\" (UID: \"a505c174-5d4d-41d8-8ec1-040fda970026\") " Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.299396 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.299975 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.309317 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a505c174-5d4d-41d8-8ec1-040fda970026-kube-api-access-mck8w" (OuterVolumeSpecName: "kube-api-access-mck8w") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "kube-api-access-mck8w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.317205 5002 scope.go:117] "RemoveContainer" containerID="22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.317814 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034\": container with ID starting with 22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034 not found: ID does not exist" containerID="22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.317858 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034"} err="failed to get container status \"22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034\": rpc error: code = NotFound desc = could not find container \"22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034\": container with ID starting with 22048bf5840d59ecf5b36415b761a8e64584fa7f27feaf87304470151b523034 not found: ID does not exist" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.317887 5002 scope.go:117] "RemoveContainer" containerID="b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.318345 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec\": container with ID starting with b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec not found: ID does not exist" containerID="b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.318368 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec"} err="failed to get container status \"b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec\": rpc error: code = NotFound desc = could not find container \"b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec\": container with ID starting with b86d3ca5e6d84868281165e5962a96101ca2f14b6f1394ee36d98b64937486ec not found: ID does not exist" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.318386 5002 scope.go:117] "RemoveContainer" containerID="d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.318985 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28\": container with ID starting with d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28 not found: ID does not exist" containerID="d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.319021 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28"} err="failed to get container status \"d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28\": rpc error: code = NotFound desc = could not 
find container \"d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28\": container with ID starting with d50958a46f8f80c141d7abda6bd0aea0d574e0e5ef65a86ae774e67fb4518d28 not found: ID does not exist" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.319043 5002 scope.go:117] "RemoveContainer" containerID="c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.319321 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64\": container with ID starting with c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64 not found: ID does not exist" containerID="c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.319349 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64"} err="failed to get container status \"c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64\": rpc error: code = NotFound desc = could not find container \"c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64\": container with ID starting with c129674dfccb72a08557766b65eb6647a4dea19ae5a46e8dbd69ff5950ac0c64 not found: ID does not exist" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.330623 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.333252 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-scripts" (OuterVolumeSpecName: "scripts") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.390098 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.401432 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.401469 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.401482 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.401495 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mck8w\" (UniqueName: \"kubernetes.io/projected/a505c174-5d4d-41d8-8ec1-040fda970026-kube-api-access-mck8w\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.401505 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.401516 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a505c174-5d4d-41d8-8ec1-040fda970026-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.426094 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-config-data" (OuterVolumeSpecName: "config-data") pod "a505c174-5d4d-41d8-8ec1-040fda970026" (UID: "a505c174-5d4d-41d8-8ec1-040fda970026"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.502865 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a505c174-5d4d-41d8-8ec1-040fda970026-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.568809 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.581319 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594055 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.594601 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="sg-core" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594625 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="sg-core" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.594653 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-central-agent" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594660 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-central-agent" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.594675 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="proxy-httpd" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594682 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="proxy-httpd" Dec 03 16:54:40 crc kubenswrapper[5002]: E1203 16:54:40.594696 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-notification-agent" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594702 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-notification-agent" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594903 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-notification-agent" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594921 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="ceilometer-central-agent" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594937 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="sg-core" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.594949 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" containerName="proxy-httpd" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.596633 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.599472 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.599953 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.600158 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.625256 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706415 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-scripts\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706460 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706484 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706530 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q756\" (UniqueName: \"kubernetes.io/projected/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-kube-api-access-6q756\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706584 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-run-httpd\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706621 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706641 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-log-httpd\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.706868 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-config-data\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.808861 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.809256 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-log-httpd\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.809452 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-config-data\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.809646 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-scripts\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.809887 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.809994 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.809910 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-log-httpd\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.810099 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q756\" (UniqueName: \"kubernetes.io/projected/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-kube-api-access-6q756\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.810568 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-run-httpd\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.810980 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-run-httpd\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.815091 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-config-data\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.815444 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.815528 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.815720 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-scripts\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.817105 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.832020 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q756\" (UniqueName: \"kubernetes.io/projected/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-kube-api-access-6q756\") pod \"ceilometer-0\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " pod="openstack/ceilometer-0" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.852719 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a505c174-5d4d-41d8-8ec1-040fda970026" path="/var/lib/kubelet/pods/a505c174-5d4d-41d8-8ec1-040fda970026/volumes" Dec 03 16:54:40 crc kubenswrapper[5002]: I1203 16:54:40.924344 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:54:41 crc kubenswrapper[5002]: I1203 16:54:41.410650 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:54:42 crc kubenswrapper[5002]: I1203 16:54:42.239111 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerStarted","Data":"7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2"} Dec 03 16:54:42 crc kubenswrapper[5002]: I1203 16:54:42.239778 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerStarted","Data":"b0562746dd1dc756f237fcc1cab1f4e719b2a5b1ad6607996648583bd4c871c7"} Dec 03 16:54:42 crc kubenswrapper[5002]: I1203 16:54:42.495320 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 03 16:54:42 crc kubenswrapper[5002]: I1203 16:54:42.536459 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 16:54:43 crc kubenswrapper[5002]: I1203 16:54:43.253074 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerStarted","Data":"34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d"} Dec 03 16:54:44 crc kubenswrapper[5002]: I1203 16:54:44.266923 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerStarted","Data":"d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2"} Dec 03 16:54:46 crc kubenswrapper[5002]: I1203 16:54:46.293401 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerStarted","Data":"65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b"} Dec 03 16:54:46 crc kubenswrapper[5002]: I1203 16:54:46.296384 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:54:46 crc kubenswrapper[5002]: I1203 16:54:46.331001 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.612604239 podStartE2EDuration="6.330973628s" podCreationTimestamp="2025-12-03 16:54:40 +0000 UTC" firstStartedPulling="2025-12-03 16:54:41.418925698 +0000 UTC m=+1404.832747606" lastFinishedPulling="2025-12-03 16:54:45.137295107 +0000 UTC m=+1408.551116995" observedRunningTime="2025-12-03 16:54:46.31853434 +0000 UTC m=+1409.732356228" watchObservedRunningTime="2025-12-03 16:54:46.330973628 +0000 UTC m=+1409.744795516" Dec 03 16:54:47 crc kubenswrapper[5002]: I1203 16:54:47.536096 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 16:54:47 crc kubenswrapper[5002]: I1203 16:54:47.589298 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 16:54:47 crc kubenswrapper[5002]: I1203 16:54:47.630546 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 16:54:47 crc kubenswrapper[5002]: I1203 16:54:47.816560 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:54:47 crc kubenswrapper[5002]: I1203 16:54:47.817063 
5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:54:48 crc kubenswrapper[5002]: I1203 16:54:48.362822 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 16:54:48 crc kubenswrapper[5002]: I1203 16:54:48.899016 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.190:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:54:48 crc kubenswrapper[5002]: I1203 16:54:48.900633 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.190:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:54:50 crc kubenswrapper[5002]: I1203 16:54:50.917585 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:54:50 crc kubenswrapper[5002]: I1203 16:54:50.918165 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.355161 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.366312 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.429450 5002 generic.go:334] "Generic (PLEG): container finished" podID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerID="990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336" exitCode=137 Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.429524 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb","Type":"ContainerDied","Data":"990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336"} Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.429556 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb","Type":"ContainerDied","Data":"8db8880184a64222899c81c3c8b536bd444ca369f57504c0ee51b10a30cc65c0"} Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.429574 5002 scope.go:117] "RemoveContainer" containerID="990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.429701 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.435473 5002 generic.go:334] "Generic (PLEG): container finished" podID="ab82e179-d6cc-4942-9eb9-9b307cdaaff0" containerID="4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca" exitCode=137 Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.435509 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ab82e179-d6cc-4942-9eb9-9b307cdaaff0","Type":"ContainerDied","Data":"4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca"} Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.435535 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ab82e179-d6cc-4942-9eb9-9b307cdaaff0","Type":"ContainerDied","Data":"370dffc6fef6f2275b437bab9278fe2365871f426207277ea17b6e8c1f7420b2"} Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.435554 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.459510 5002 scope.go:117] "RemoveContainer" containerID="49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.481279 5002 scope.go:117] "RemoveContainer" containerID="990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336" Dec 03 16:54:55 crc kubenswrapper[5002]: E1203 16:54:55.481686 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336\": container with ID starting with 990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336 not found: ID does not exist" containerID="990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.481717 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336"} err="failed to get container status \"990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336\": rpc error: code = NotFound desc = could not find container \"990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336\": container with ID starting with 990c0e732c82c2c180e25af0eb01e76421ea550816ec8a2ddc78402d27db9336 not found: ID does not exist" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.481738 5002 scope.go:117] "RemoveContainer" containerID="49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0" Dec 03 16:54:55 crc kubenswrapper[5002]: E1203 16:54:55.482082 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0\": container with ID starting with 49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0 not found: ID does not exist" containerID="49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.482177 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0"} err="failed to get container status \"49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0\": rpc error: code = NotFound desc = could not 
find container \"49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0\": container with ID starting with 49184e516ecfdfd6f9168c20e80302fd21c4b4695191d991aeb78ac91a6290a0 not found: ID does not exist" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.482262 5002 scope.go:117] "RemoveContainer" containerID="4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.506602 5002 scope.go:117] "RemoveContainer" containerID="4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca" Dec 03 16:54:55 crc kubenswrapper[5002]: E1203 16:54:55.507092 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca\": container with ID starting with 4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca not found: ID does not exist" containerID="4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.507151 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca"} err="failed to get container status \"4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca\": rpc error: code = NotFound desc = could not find container \"4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca\": container with ID starting with 4bc74beded1128424b8e07436a38fe9266e88dcbe6e272acf5d94dde4a64beca not found: ID does not exist" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.547538 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gwgp\" (UniqueName: \"kubernetes.io/projected/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-kube-api-access-8gwgp\") pod \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.547681 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-config-data\") pod \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.547737 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-config-data\") pod \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.548512 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-logs\") pod \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.548577 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-combined-ca-bundle\") pod \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.548695 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-combined-ca-bundle\") pod \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\" (UID: \"ab82e179-d6cc-4942-9eb9-9b307cdaaff0\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.548963 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b4fh\" (UniqueName: \"kubernetes.io/projected/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-kube-api-access-9b4fh\") pod \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\" (UID: \"ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb\") " Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.549035 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-logs" (OuterVolumeSpecName: "logs") pod "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" (UID: "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.550041 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.553677 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-kube-api-access-8gwgp" (OuterVolumeSpecName: "kube-api-access-8gwgp") pod "ab82e179-d6cc-4942-9eb9-9b307cdaaff0" (UID: "ab82e179-d6cc-4942-9eb9-9b307cdaaff0"). InnerVolumeSpecName "kube-api-access-8gwgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.553894 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-kube-api-access-9b4fh" (OuterVolumeSpecName: "kube-api-access-9b4fh") pod "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" (UID: "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb"). InnerVolumeSpecName "kube-api-access-9b4fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.581106 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-config-data" (OuterVolumeSpecName: "config-data") pod "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" (UID: "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.587040 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab82e179-d6cc-4942-9eb9-9b307cdaaff0" (UID: "ab82e179-d6cc-4942-9eb9-9b307cdaaff0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.596913 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-config-data" (OuterVolumeSpecName: "config-data") pod "ab82e179-d6cc-4942-9eb9-9b307cdaaff0" (UID: "ab82e179-d6cc-4942-9eb9-9b307cdaaff0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.602256 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" (UID: "ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.652242 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.652274 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.652284 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b4fh\" (UniqueName: \"kubernetes.io/projected/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-kube-api-access-9b4fh\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.652296 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gwgp\" (UniqueName: \"kubernetes.io/projected/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-kube-api-access-8gwgp\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.652306 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab82e179-d6cc-4942-9eb9-9b307cdaaff0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.652313 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.785133 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.807993 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.820911 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.833284 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.843626 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: E1203 16:54:55.844135 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-metadata" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.844159 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-metadata" Dec 03 16:54:55 crc kubenswrapper[5002]: E1203 16:54:55.844190 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-log" Dec 03 16:54:55 crc 
kubenswrapper[5002]: I1203 16:54:55.844199 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-log" Dec 03 16:54:55 crc kubenswrapper[5002]: E1203 16:54:55.844226 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab82e179-d6cc-4942-9eb9-9b307cdaaff0" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.844235 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab82e179-d6cc-4942-9eb9-9b307cdaaff0" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.844481 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab82e179-d6cc-4942-9eb9-9b307cdaaff0" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.844505 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-log" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.844531 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" containerName="nova-metadata-metadata" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.845814 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.849684 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.849905 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.851858 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.853156 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.854676 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.855014 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.858383 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.860798 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.874870 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.958556 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.958648 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.958765 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mp2j\" (UniqueName: \"kubernetes.io/projected/46f912d8-a71e-4c36-93cd-7dbab1c9558a-kube-api-access-6mp2j\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.958799 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.958827 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.959097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46f912d8-a71e-4c36-93cd-7dbab1c9558a-logs\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.959336 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t44c\" (UniqueName: 
\"kubernetes.io/projected/922c692b-3d5c-45df-862d-d4e08b06fe0b-kube-api-access-5t44c\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.959500 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.959550 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-config-data\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:55 crc kubenswrapper[5002]: I1203 16:54:55.959573 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.061920 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t44c\" (UniqueName: \"kubernetes.io/projected/922c692b-3d5c-45df-862d-d4e08b06fe0b-kube-api-access-5t44c\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.062591 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.062904 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-config-data\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.063689 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.063974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.064221 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.065743 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mp2j\" (UniqueName: \"kubernetes.io/projected/46f912d8-a71e-4c36-93cd-7dbab1c9558a-kube-api-access-6mp2j\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.065974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.066132 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.066386 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46f912d8-a71e-4c36-93cd-7dbab1c9558a-logs\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.068048 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46f912d8-a71e-4c36-93cd-7dbab1c9558a-logs\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.070240 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-config-data\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.074448 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.074580 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.075092 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.075151 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.075344 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.080502 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.091876 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mp2j\" (UniqueName: \"kubernetes.io/projected/46f912d8-a71e-4c36-93cd-7dbab1c9558a-kube-api-access-6mp2j\") pod \"nova-metadata-0\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.091879 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t44c\" (UniqueName: \"kubernetes.io/projected/922c692b-3d5c-45df-862d-d4e08b06fe0b-kube-api-access-5t44c\") pod \"nova-cell1-novncproxy-0\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.168583 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.180873 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.538839 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.718631 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:54:56 crc kubenswrapper[5002]: W1203 16:54:56.718690 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46f912d8_a71e_4c36_93cd_7dbab1c9558a.slice/crio-805ddf530c513a3ef8319cc1d92dec2338143dbe2199612d4a7ab671932d9e4d WatchSource:0}: Error finding container 805ddf530c513a3ef8319cc1d92dec2338143dbe2199612d4a7ab671932d9e4d: Status 404 returned error can't find the container with id 805ddf530c513a3ef8319cc1d92dec2338143dbe2199612d4a7ab671932d9e4d Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.853639 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab82e179-d6cc-4942-9eb9-9b307cdaaff0" path="/var/lib/kubelet/pods/ab82e179-d6cc-4942-9eb9-9b307cdaaff0/volumes" Dec 03 16:54:56 crc kubenswrapper[5002]: I1203 16:54:56.854728 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb" path="/var/lib/kubelet/pods/ed6aac92-4b01-4496-81ae-1aa9d1e6e8fb/volumes" Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.471284 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"46f912d8-a71e-4c36-93cd-7dbab1c9558a","Type":"ContainerStarted","Data":"2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9"} Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.471851 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"46f912d8-a71e-4c36-93cd-7dbab1c9558a","Type":"ContainerStarted","Data":"0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12"} Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.471878 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"46f912d8-a71e-4c36-93cd-7dbab1c9558a","Type":"ContainerStarted","Data":"805ddf530c513a3ef8319cc1d92dec2338143dbe2199612d4a7ab671932d9e4d"} Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.474288 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"922c692b-3d5c-45df-862d-d4e08b06fe0b","Type":"ContainerStarted","Data":"b7977fd2fc849375b5f24a1f92f0acd219b3d1bff05f0b2a40d3322e005c9013"} Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.474375 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"922c692b-3d5c-45df-862d-d4e08b06fe0b","Type":"ContainerStarted","Data":"15e428211cbb8116af4bedac9722e645f9afc19ec6e413fd937a91a0fcbadc81"} Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.501206 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.501188176 podStartE2EDuration="2.501188176s" podCreationTimestamp="2025-12-03 16:54:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:57.495363247 +0000 UTC m=+1420.909185205" watchObservedRunningTime="2025-12-03 16:54:57.501188176 +0000 UTC m=+1420.915010064" Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 
16:54:57.521873 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.521779036 podStartE2EDuration="2.521779036s" podCreationTimestamp="2025-12-03 16:54:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:54:57.515428453 +0000 UTC m=+1420.929250371" watchObservedRunningTime="2025-12-03 16:54:57.521779036 +0000 UTC m=+1420.935600924" Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.820740 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.821498 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.822605 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:54:57 crc kubenswrapper[5002]: I1203 16:54:57.826224 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.483770 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.490836 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.736839 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9cbcb645-dw9nm"] Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.757138 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.788465 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9cbcb645-dw9nm"] Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.942559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.943663 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.943695 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r57xj\" (UniqueName: \"kubernetes.io/projected/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-kube-api-access-r57xj\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.943962 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-nb\") pod 
\"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.944036 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-svc\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:58 crc kubenswrapper[5002]: I1203 16:54:58.944066 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-config\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.046365 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.046461 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-svc\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.046490 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-config\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.046520 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.046543 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.046565 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r57xj\" (UniqueName: \"kubernetes.io/projected/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-kube-api-access-r57xj\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.048014 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: 
\"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.048355 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-config\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.048696 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.049509 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-svc\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.049512 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.079313 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r57xj\" (UniqueName: \"kubernetes.io/projected/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-kube-api-access-r57xj\") pod \"dnsmasq-dns-5c9cbcb645-dw9nm\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.111852 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:54:59 crc kubenswrapper[5002]: W1203 16:54:59.628697 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34a0fbfb_baac_41c7_8430_cb0e1720dfa5.slice/crio-c694c2b070b49fa89d101386b041a7e045c135420e19fa507dfc5ef9e4a7de9e WatchSource:0}: Error finding container c694c2b070b49fa89d101386b041a7e045c135420e19fa507dfc5ef9e4a7de9e: Status 404 returned error can't find the container with id c694c2b070b49fa89d101386b041a7e045c135420e19fa507dfc5ef9e4a7de9e Dec 03 16:54:59 crc kubenswrapper[5002]: I1203 16:54:59.639944 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9cbcb645-dw9nm"] Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.514724 5002 generic.go:334] "Generic (PLEG): container finished" podID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerID="5dd60355fb5a4a6102dcb9b1dd2b0adb310b8248a9479047879e2a7f22a1d4bb" exitCode=0 Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.514964 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" event={"ID":"34a0fbfb-baac-41c7-8430-cb0e1720dfa5","Type":"ContainerDied","Data":"5dd60355fb5a4a6102dcb9b1dd2b0adb310b8248a9479047879e2a7f22a1d4bb"} Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.516310 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" event={"ID":"34a0fbfb-baac-41c7-8430-cb0e1720dfa5","Type":"ContainerStarted","Data":"c694c2b070b49fa89d101386b041a7e045c135420e19fa507dfc5ef9e4a7de9e"} Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.798890 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.801828 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="proxy-httpd" containerID="cri-o://65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b" gracePeriod=30 Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.801888 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="sg-core" containerID="cri-o://d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2" gracePeriod=30 Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.802092 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-notification-agent" containerID="cri-o://34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d" gracePeriod=30 Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.802318 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-central-agent" containerID="cri-o://7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2" gracePeriod=30 Dec 03 16:55:00 crc kubenswrapper[5002]: I1203 16:55:00.819370 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.191:3000/\": EOF" Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 
16:55:01.169573 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.170088 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.182150 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.529418 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" event={"ID":"34a0fbfb-baac-41c7-8430-cb0e1720dfa5","Type":"ContainerStarted","Data":"58c1385bcef3302471d6a081ef5d49065e260a3904a830edb4066b5c487279af"} Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.529501 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.532930 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerID="65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b" exitCode=0 Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.532961 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerID="d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2" exitCode=2 Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.532973 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerID="7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2" exitCode=0 Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.532990 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerDied","Data":"65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b"} Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.533031 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerDied","Data":"d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2"} Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.533044 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerDied","Data":"7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2"} Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.552813 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" podStartSLOduration=3.5527941690000002 podStartE2EDuration="3.552794169s" podCreationTimestamp="2025-12-03 16:54:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:55:01.547622029 +0000 UTC m=+1424.961443917" watchObservedRunningTime="2025-12-03 16:55:01.552794169 +0000 UTC m=+1424.966616047" Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.584532 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.584984 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" 
containerName="nova-api-api" containerID="cri-o://3a6bc44f51ab701f3337d6dc27d69715e9a69555db46a09cf7250349b253be78" gracePeriod=30 Dec 03 16:55:01 crc kubenswrapper[5002]: I1203 16:55:01.585162 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-log" containerID="cri-o://191ff587e9eab4cb2a56c3199cde5cee6c4e7ea889229f2d9549884f14e893fb" gracePeriod=30 Dec 03 16:55:02 crc kubenswrapper[5002]: I1203 16:55:02.550313 5002 generic.go:334] "Generic (PLEG): container finished" podID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerID="191ff587e9eab4cb2a56c3199cde5cee6c4e7ea889229f2d9549884f14e893fb" exitCode=143 Dec 03 16:55:02 crc kubenswrapper[5002]: I1203 16:55:02.550372 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c96d8154-fb63-48a7-bcdd-257582c6e458","Type":"ContainerDied","Data":"191ff587e9eab4cb2a56c3199cde5cee6c4e7ea889229f2d9549884f14e893fb"} Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.474694 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578178 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-run-httpd\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578408 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-log-httpd\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578542 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-ceilometer-tls-certs\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578611 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-combined-ca-bundle\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578682 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q756\" (UniqueName: \"kubernetes.io/projected/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-kube-api-access-6q756\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578767 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.578933 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-sg-core-conf-yaml\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.579036 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-config-data\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.579079 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-scripts\") pod \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\" (UID: \"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31\") " Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.579177 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.580174 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.580223 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.583157 5002 generic.go:334] "Generic (PLEG): container finished" podID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerID="34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d" exitCode=0 Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.583232 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerDied","Data":"34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d"} Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.583301 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fa90f890-dbc5-49c2-a2d4-d4c00cc21e31","Type":"ContainerDied","Data":"b0562746dd1dc756f237fcc1cab1f4e719b2a5b1ad6607996648583bd4c871c7"} Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.583338 5002 scope.go:117] "RemoveContainer" containerID="65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.583669 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.594848 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-scripts" (OuterVolumeSpecName: "scripts") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.608022 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-kube-api-access-6q756" (OuterVolumeSpecName: "kube-api-access-6q756") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "kube-api-access-6q756". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.629065 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.643969 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.681903 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.682427 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.682450 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.682468 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q756\" (UniqueName: \"kubernetes.io/projected/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-kube-api-access-6q756\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.693733 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.725449 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-config-data" (OuterVolumeSpecName: "config-data") pod "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" (UID: "fa90f890-dbc5-49c2-a2d4-d4c00cc21e31"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.736135 5002 scope.go:117] "RemoveContainer" containerID="d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.761582 5002 scope.go:117] "RemoveContainer" containerID="34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.784463 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.784512 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.787137 5002 scope.go:117] "RemoveContainer" containerID="7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.825833 5002 scope.go:117] "RemoveContainer" containerID="65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.826623 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b\": container with ID starting with 65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b not found: ID does not exist" containerID="65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.826668 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b"} err="failed to get container status \"65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b\": rpc error: code = NotFound desc = could not find container \"65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b\": container with ID starting with 65179c5f4d614776e106cc44ff01f44a23d5896b7af1e346c5634d0f34dae46b not found: ID does not exist" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.826695 5002 scope.go:117] "RemoveContainer" containerID="d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.827325 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2\": container with ID starting with d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2 not found: ID does not exist" containerID="d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.827456 5002 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2"} err="failed to get container status \"d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2\": rpc error: code = NotFound desc = could not find container \"d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2\": container with ID starting with d334f7cbb42a8a48acfba788fcacba7b599ec5ae89eccdeec4d432155bd83ed2 not found: ID does not exist" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.827497 5002 scope.go:117] "RemoveContainer" containerID="34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.828004 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d\": container with ID starting with 34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d not found: ID does not exist" containerID="34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.828088 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d"} err="failed to get container status \"34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d\": rpc error: code = NotFound desc = could not find container \"34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d\": container with ID starting with 34e5f4dc8ad1a358f401f750939fa4d1c60c064f96bc626b2bbec924927f891d not found: ID does not exist" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.828118 5002 scope.go:117] "RemoveContainer" containerID="7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.828536 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2\": container with ID starting with 7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2 not found: ID does not exist" containerID="7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.828579 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2"} err="failed to get container status \"7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2\": rpc error: code = NotFound desc = could not find container \"7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2\": container with ID starting with 7beb1fe179bb6b21acb80d6ddbed6972f648ebff9c8831163a451747eba8cba2 not found: ID does not exist" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.942816 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.960259 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983073 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.983532 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-central-agent" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983556 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-central-agent" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.983579 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="sg-core" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983588 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="sg-core" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.983604 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="proxy-httpd" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983614 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="proxy-httpd" Dec 03 16:55:04 crc kubenswrapper[5002]: E1203 16:55:04.983642 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-notification-agent" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983653 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-notification-agent" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983890 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="sg-core" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983912 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="proxy-httpd" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983928 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-notification-agent" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.983943 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" containerName="ceilometer-central-agent" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.986052 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.989494 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.989716 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.989880 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 16:55:04 crc kubenswrapper[5002]: I1203 16:55:04.993097 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090578 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090689 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdcdk\" (UniqueName: \"kubernetes.io/projected/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-kube-api-access-xdcdk\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090766 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090818 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090855 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-scripts\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090910 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-config-data\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090952 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-run-httpd\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.090988 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-log-httpd\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.193182 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-run-httpd\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.194092 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-run-httpd\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.194291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-log-httpd\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.194708 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-log-httpd\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.194967 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.195148 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdcdk\" (UniqueName: \"kubernetes.io/projected/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-kube-api-access-xdcdk\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.195392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.195537 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.195645 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-scripts\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.195794 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-config-data\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.202291 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.203141 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.204076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.208137 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-scripts\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.223172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-config-data\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.227446 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdcdk\" (UniqueName: \"kubernetes.io/projected/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-kube-api-access-xdcdk\") pod \"ceilometer-0\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.306568 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.599507 5002 generic.go:334] "Generic (PLEG): container finished" podID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerID="3a6bc44f51ab701f3337d6dc27d69715e9a69555db46a09cf7250349b253be78" exitCode=0 Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.599607 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c96d8154-fb63-48a7-bcdd-257582c6e458","Type":"ContainerDied","Data":"3a6bc44f51ab701f3337d6dc27d69715e9a69555db46a09cf7250349b253be78"} Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.671134 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.808362 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c96d8154-fb63-48a7-bcdd-257582c6e458-logs\") pod \"c96d8154-fb63-48a7-bcdd-257582c6e458\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.808520 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-combined-ca-bundle\") pod \"c96d8154-fb63-48a7-bcdd-257582c6e458\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.808742 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfslq\" (UniqueName: \"kubernetes.io/projected/c96d8154-fb63-48a7-bcdd-257582c6e458-kube-api-access-gfslq\") pod \"c96d8154-fb63-48a7-bcdd-257582c6e458\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.808875 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-config-data\") pod \"c96d8154-fb63-48a7-bcdd-257582c6e458\" (UID: \"c96d8154-fb63-48a7-bcdd-257582c6e458\") " Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.809058 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c96d8154-fb63-48a7-bcdd-257582c6e458-logs" (OuterVolumeSpecName: "logs") pod "c96d8154-fb63-48a7-bcdd-257582c6e458" (UID: "c96d8154-fb63-48a7-bcdd-257582c6e458"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.809493 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c96d8154-fb63-48a7-bcdd-257582c6e458-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.815174 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c96d8154-fb63-48a7-bcdd-257582c6e458-kube-api-access-gfslq" (OuterVolumeSpecName: "kube-api-access-gfslq") pod "c96d8154-fb63-48a7-bcdd-257582c6e458" (UID: "c96d8154-fb63-48a7-bcdd-257582c6e458"). InnerVolumeSpecName "kube-api-access-gfslq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.857917 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c96d8154-fb63-48a7-bcdd-257582c6e458" (UID: "c96d8154-fb63-48a7-bcdd-257582c6e458"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.868138 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.870272 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.886941 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-config-data" (OuterVolumeSpecName: "config-data") pod "c96d8154-fb63-48a7-bcdd-257582c6e458" (UID: "c96d8154-fb63-48a7-bcdd-257582c6e458"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.973272 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.973571 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96d8154-fb63-48a7-bcdd-257582c6e458-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:05 crc kubenswrapper[5002]: I1203 16:55:05.973642 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfslq\" (UniqueName: \"kubernetes.io/projected/c96d8154-fb63-48a7-bcdd-257582c6e458-kube-api-access-gfslq\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.169140 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.169228 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.182579 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.204852 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.613691 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerStarted","Data":"39344bf2d53f53c9398beed65520ac747f53c24078d65bec6de505a9dcccfc9c"} Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.619456 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.619511 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c96d8154-fb63-48a7-bcdd-257582c6e458","Type":"ContainerDied","Data":"b2f19e694f122e85fef3e25c8388d28754c9695d1275ee75b27fe7f4ffffb11f"} Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.619554 5002 scope.go:117] "RemoveContainer" containerID="3a6bc44f51ab701f3337d6dc27d69715e9a69555db46a09cf7250349b253be78" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.644541 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.660776 5002 scope.go:117] "RemoveContainer" containerID="191ff587e9eab4cb2a56c3199cde5cee6c4e7ea889229f2d9549884f14e893fb" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.677822 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.705843 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.713975 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:06 crc kubenswrapper[5002]: E1203 16:55:06.714548 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-api" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.714574 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-api" Dec 03 16:55:06 crc kubenswrapper[5002]: E1203 16:55:06.714614 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-log" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.714624 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-log" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.714877 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-log" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.714921 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" containerName="nova-api-api" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.716228 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.722493 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.722732 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.722881 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.764559 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.852958 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c96d8154-fb63-48a7-bcdd-257582c6e458" path="/var/lib/kubelet/pods/c96d8154-fb63-48a7-bcdd-257582c6e458/volumes" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.853615 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa90f890-dbc5-49c2-a2d4-d4c00cc21e31" path="/var/lib/kubelet/pods/fa90f890-dbc5-49c2-a2d4-d4c00cc21e31/volumes" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.880850 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-pltf4"] Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.882624 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.885633 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.885674 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.890268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-public-tls-certs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.890338 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.890360 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9hms\" (UniqueName: \"kubernetes.io/projected/ff796baf-5509-457b-b651-8f6cd700e526-kube-api-access-m9hms\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.890410 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.890445 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff796baf-5509-457b-b651-8f6cd700e526-logs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.890467 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-config-data\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.918152 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pltf4"] Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992209 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crnhb\" (UniqueName: \"kubernetes.io/projected/8fd124dd-d66f-4420-a183-383fc6c2ea02-kube-api-access-crnhb\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992273 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9hms\" (UniqueName: \"kubernetes.io/projected/ff796baf-5509-457b-b651-8f6cd700e526-kube-api-access-m9hms\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992309 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-scripts\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992615 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992668 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff796baf-5509-457b-b651-8f6cd700e526-logs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992700 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-config-data\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-config-data\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992839 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.992870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-public-tls-certs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:06 crc kubenswrapper[5002]: I1203 16:55:06.995484 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff796baf-5509-457b-b651-8f6cd700e526-logs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.000958 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.001197 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.009251 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-config-data\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.009449 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-public-tls-certs\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.018172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9hms\" (UniqueName: \"kubernetes.io/projected/ff796baf-5509-457b-b651-8f6cd700e526-kube-api-access-m9hms\") pod \"nova-api-0\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.033805 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.101007 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-config-data\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.101147 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.101197 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crnhb\" (UniqueName: \"kubernetes.io/projected/8fd124dd-d66f-4420-a183-383fc6c2ea02-kube-api-access-crnhb\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.101226 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-scripts\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.105700 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-scripts\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.106362 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.109145 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-config-data\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.121669 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crnhb\" (UniqueName: \"kubernetes.io/projected/8fd124dd-d66f-4420-a183-383fc6c2ea02-kube-api-access-crnhb\") pod \"nova-cell1-cell-mapping-pltf4\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.190526 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 
16:55:07.190905 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.196275 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.555672 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.635874 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ff796baf-5509-457b-b651-8f6cd700e526","Type":"ContainerStarted","Data":"a6dfa31c5b0191155266bb543205e676e8f56c41dc2fd6e9d9ef1ff158fb6baf"} Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.641248 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerStarted","Data":"b8df45427a97ecede50bedecdafce707afb7a6c3b99bed2b3b06ade3f707f8c0"} Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.641273 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerStarted","Data":"f170d2d5c5453f499381793538a1774fccdd3cc83fae41dd5c62f54865d69fea"} Dec 03 16:55:07 crc kubenswrapper[5002]: I1203 16:55:07.729606 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pltf4"] Dec 03 16:55:07 crc kubenswrapper[5002]: W1203 16:55:07.735826 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fd124dd_d66f_4420_a183_383fc6c2ea02.slice/crio-471ce354fbd908886c61a5f17e3731089e63adb1088689b18f871a3542357748 WatchSource:0}: Error finding container 471ce354fbd908886c61a5f17e3731089e63adb1088689b18f871a3542357748: Status 404 returned error can't find the container with id 471ce354fbd908886c61a5f17e3731089e63adb1088689b18f871a3542357748 Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.654972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerStarted","Data":"739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721"} Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.657603 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ff796baf-5509-457b-b651-8f6cd700e526","Type":"ContainerStarted","Data":"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15"} Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.657716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ff796baf-5509-457b-b651-8f6cd700e526","Type":"ContainerStarted","Data":"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8"} Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.659417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pltf4" event={"ID":"8fd124dd-d66f-4420-a183-383fc6c2ea02","Type":"ContainerStarted","Data":"3dea21c55187632d7d42ec51fafaac9c2d306b2b50b5e53b5794f26fbc124243"} Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.659541 5002 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pltf4" event={"ID":"8fd124dd-d66f-4420-a183-383fc6c2ea02","Type":"ContainerStarted","Data":"471ce354fbd908886c61a5f17e3731089e63adb1088689b18f871a3542357748"} Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.699107 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.699074796 podStartE2EDuration="2.699074796s" podCreationTimestamp="2025-12-03 16:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:55:08.682253178 +0000 UTC m=+1432.096075076" watchObservedRunningTime="2025-12-03 16:55:08.699074796 +0000 UTC m=+1432.112896724" Dec 03 16:55:08 crc kubenswrapper[5002]: I1203 16:55:08.709433 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-pltf4" podStartSLOduration=2.709408067 podStartE2EDuration="2.709408067s" podCreationTimestamp="2025-12-03 16:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:55:08.70220263 +0000 UTC m=+1432.116024528" watchObservedRunningTime="2025-12-03 16:55:08.709408067 +0000 UTC m=+1432.123229995" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.113858 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.197955 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c4475fdfc-pv642"] Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.198569 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerName="dnsmasq-dns" containerID="cri-o://494ae1cdce3c8bc209ceb1c212726454d0a091e15dcb7b7aecd3e12a2e90df3e" gracePeriod=10 Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.671001 5002 generic.go:334] "Generic (PLEG): container finished" podID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerID="494ae1cdce3c8bc209ceb1c212726454d0a091e15dcb7b7aecd3e12a2e90df3e" exitCode=0 Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.671079 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" event={"ID":"7ca5d77b-a995-41c0-ba36-54a896f63a7a","Type":"ContainerDied","Data":"494ae1cdce3c8bc209ceb1c212726454d0a091e15dcb7b7aecd3e12a2e90df3e"} Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.671494 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" event={"ID":"7ca5d77b-a995-41c0-ba36-54a896f63a7a","Type":"ContainerDied","Data":"7755eff49c96a2e5b4f8d12a176dd46291a19867da56e49f826ce49b38eac8d7"} Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.671509 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7755eff49c96a2e5b4f8d12a176dd46291a19867da56e49f826ce49b38eac8d7" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.725389 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.884329 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-nb\") pod \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.884434 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-swift-storage-0\") pod \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.884551 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djhx8\" (UniqueName: \"kubernetes.io/projected/7ca5d77b-a995-41c0-ba36-54a896f63a7a-kube-api-access-djhx8\") pod \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.884578 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-config\") pod \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.884636 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-sb\") pod \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.884682 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-svc\") pod \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\" (UID: \"7ca5d77b-a995-41c0-ba36-54a896f63a7a\") " Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.908025 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ca5d77b-a995-41c0-ba36-54a896f63a7a-kube-api-access-djhx8" (OuterVolumeSpecName: "kube-api-access-djhx8") pod "7ca5d77b-a995-41c0-ba36-54a896f63a7a" (UID: "7ca5d77b-a995-41c0-ba36-54a896f63a7a"). InnerVolumeSpecName "kube-api-access-djhx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.946734 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7ca5d77b-a995-41c0-ba36-54a896f63a7a" (UID: "7ca5d77b-a995-41c0-ba36-54a896f63a7a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.950639 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-config" (OuterVolumeSpecName: "config") pod "7ca5d77b-a995-41c0-ba36-54a896f63a7a" (UID: "7ca5d77b-a995-41c0-ba36-54a896f63a7a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.960344 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7ca5d77b-a995-41c0-ba36-54a896f63a7a" (UID: "7ca5d77b-a995-41c0-ba36-54a896f63a7a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.972360 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ca5d77b-a995-41c0-ba36-54a896f63a7a" (UID: "7ca5d77b-a995-41c0-ba36-54a896f63a7a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.987218 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.987260 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.987273 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djhx8\" (UniqueName: \"kubernetes.io/projected/7ca5d77b-a995-41c0-ba36-54a896f63a7a-kube-api-access-djhx8\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.987284 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.987294 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:09 crc kubenswrapper[5002]: I1203 16:55:09.988398 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ca5d77b-a995-41c0-ba36-54a896f63a7a" (UID: "7ca5d77b-a995-41c0-ba36-54a896f63a7a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.089124 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca5d77b-a995-41c0-ba36-54a896f63a7a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.697400 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c4475fdfc-pv642" Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.698943 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerStarted","Data":"dfddf060fa4fddc35fda40088529fbf414441a192eea714876824df928864e7e"} Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.699555 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.734339 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.794352254 podStartE2EDuration="6.7343207s" podCreationTimestamp="2025-12-03 16:55:04 +0000 UTC" firstStartedPulling="2025-12-03 16:55:05.870074431 +0000 UTC m=+1429.283896319" lastFinishedPulling="2025-12-03 16:55:09.810042877 +0000 UTC m=+1433.223864765" observedRunningTime="2025-12-03 16:55:10.726535258 +0000 UTC m=+1434.140357166" watchObservedRunningTime="2025-12-03 16:55:10.7343207 +0000 UTC m=+1434.148142588" Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.762076 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c4475fdfc-pv642"] Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.773370 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c4475fdfc-pv642"] Dec 03 16:55:10 crc kubenswrapper[5002]: I1203 16:55:10.850328 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" path="/var/lib/kubelet/pods/7ca5d77b-a995-41c0-ba36-54a896f63a7a/volumes" Dec 03 16:55:13 crc kubenswrapper[5002]: I1203 16:55:13.735599 5002 generic.go:334] "Generic (PLEG): container finished" podID="8fd124dd-d66f-4420-a183-383fc6c2ea02" containerID="3dea21c55187632d7d42ec51fafaac9c2d306b2b50b5e53b5794f26fbc124243" exitCode=0 Dec 03 16:55:13 crc kubenswrapper[5002]: I1203 16:55:13.736571 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pltf4" event={"ID":"8fd124dd-d66f-4420-a183-383fc6c2ea02","Type":"ContainerDied","Data":"3dea21c55187632d7d42ec51fafaac9c2d306b2b50b5e53b5794f26fbc124243"} Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.128906 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.320360 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crnhb\" (UniqueName: \"kubernetes.io/projected/8fd124dd-d66f-4420-a183-383fc6c2ea02-kube-api-access-crnhb\") pod \"8fd124dd-d66f-4420-a183-383fc6c2ea02\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.320457 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-scripts\") pod \"8fd124dd-d66f-4420-a183-383fc6c2ea02\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.320565 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-config-data\") pod \"8fd124dd-d66f-4420-a183-383fc6c2ea02\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.321804 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-combined-ca-bundle\") pod \"8fd124dd-d66f-4420-a183-383fc6c2ea02\" (UID: \"8fd124dd-d66f-4420-a183-383fc6c2ea02\") " Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.330396 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fd124dd-d66f-4420-a183-383fc6c2ea02-kube-api-access-crnhb" (OuterVolumeSpecName: "kube-api-access-crnhb") pod "8fd124dd-d66f-4420-a183-383fc6c2ea02" (UID: "8fd124dd-d66f-4420-a183-383fc6c2ea02"). InnerVolumeSpecName "kube-api-access-crnhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.331254 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-scripts" (OuterVolumeSpecName: "scripts") pod "8fd124dd-d66f-4420-a183-383fc6c2ea02" (UID: "8fd124dd-d66f-4420-a183-383fc6c2ea02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.374873 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-config-data" (OuterVolumeSpecName: "config-data") pod "8fd124dd-d66f-4420-a183-383fc6c2ea02" (UID: "8fd124dd-d66f-4420-a183-383fc6c2ea02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.375737 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fd124dd-d66f-4420-a183-383fc6c2ea02" (UID: "8fd124dd-d66f-4420-a183-383fc6c2ea02"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.424485 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crnhb\" (UniqueName: \"kubernetes.io/projected/8fd124dd-d66f-4420-a183-383fc6c2ea02-kube-api-access-crnhb\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.424533 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.424549 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.424564 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd124dd-d66f-4420-a183-383fc6c2ea02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.775687 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pltf4" event={"ID":"8fd124dd-d66f-4420-a183-383fc6c2ea02","Type":"ContainerDied","Data":"471ce354fbd908886c61a5f17e3731089e63adb1088689b18f871a3542357748"} Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.775817 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="471ce354fbd908886c61a5f17e3731089e63adb1088689b18f871a3542357748" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.775822 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pltf4" Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.998230 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.998540 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-log" containerID="cri-o://6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8" gracePeriod=30 Dec 03 16:55:15 crc kubenswrapper[5002]: I1203 16:55:15.999218 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-api" containerID="cri-o://bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15" gracePeriod=30 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.019388 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.019628 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" containerName="nova-scheduler-scheduler" containerID="cri-o://225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" gracePeriod=30 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.040274 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.040566 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" 
containerName="nova-metadata-log" containerID="cri-o://0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12" gracePeriod=30 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.040859 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-metadata" containerID="cri-o://2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9" gracePeriod=30 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.594901 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.750951 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9hms\" (UniqueName: \"kubernetes.io/projected/ff796baf-5509-457b-b651-8f6cd700e526-kube-api-access-m9hms\") pod \"ff796baf-5509-457b-b651-8f6cd700e526\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.751107 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-public-tls-certs\") pod \"ff796baf-5509-457b-b651-8f6cd700e526\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.751249 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-config-data\") pod \"ff796baf-5509-457b-b651-8f6cd700e526\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.752445 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-internal-tls-certs\") pod \"ff796baf-5509-457b-b651-8f6cd700e526\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.752494 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-combined-ca-bundle\") pod \"ff796baf-5509-457b-b651-8f6cd700e526\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.752548 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff796baf-5509-457b-b651-8f6cd700e526-logs\") pod \"ff796baf-5509-457b-b651-8f6cd700e526\" (UID: \"ff796baf-5509-457b-b651-8f6cd700e526\") " Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.752939 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff796baf-5509-457b-b651-8f6cd700e526-logs" (OuterVolumeSpecName: "logs") pod "ff796baf-5509-457b-b651-8f6cd700e526" (UID: "ff796baf-5509-457b-b651-8f6cd700e526"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.753264 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff796baf-5509-457b-b651-8f6cd700e526-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.756865 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff796baf-5509-457b-b651-8f6cd700e526-kube-api-access-m9hms" (OuterVolumeSpecName: "kube-api-access-m9hms") pod "ff796baf-5509-457b-b651-8f6cd700e526" (UID: "ff796baf-5509-457b-b651-8f6cd700e526"). InnerVolumeSpecName "kube-api-access-m9hms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.786999 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-config-data" (OuterVolumeSpecName: "config-data") pod "ff796baf-5509-457b-b651-8f6cd700e526" (UID: "ff796baf-5509-457b-b651-8f6cd700e526"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.789238 5002 generic.go:334] "Generic (PLEG): container finished" podID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerID="0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12" exitCode=143 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.789309 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"46f912d8-a71e-4c36-93cd-7dbab1c9558a","Type":"ContainerDied","Data":"0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12"} Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.791862 5002 generic.go:334] "Generic (PLEG): container finished" podID="ff796baf-5509-457b-b651-8f6cd700e526" containerID="bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15" exitCode=0 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.791892 5002 generic.go:334] "Generic (PLEG): container finished" podID="ff796baf-5509-457b-b651-8f6cd700e526" containerID="6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8" exitCode=143 Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.791950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ff796baf-5509-457b-b651-8f6cd700e526","Type":"ContainerDied","Data":"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15"} Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.791974 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ff796baf-5509-457b-b651-8f6cd700e526","Type":"ContainerDied","Data":"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8"} Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.791989 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ff796baf-5509-457b-b651-8f6cd700e526","Type":"ContainerDied","Data":"a6dfa31c5b0191155266bb543205e676e8f56c41dc2fd6e9d9ef1ff158fb6baf"} Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.792034 5002 scope.go:117] "RemoveContainer" containerID="bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.792242 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.806412 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ff796baf-5509-457b-b651-8f6cd700e526" (UID: "ff796baf-5509-457b-b651-8f6cd700e526"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.813463 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff796baf-5509-457b-b651-8f6cd700e526" (UID: "ff796baf-5509-457b-b651-8f6cd700e526"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.815157 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ff796baf-5509-457b-b651-8f6cd700e526" (UID: "ff796baf-5509-457b-b651-8f6cd700e526"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.820473 5002 scope.go:117] "RemoveContainer" containerID="6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.839379 5002 scope.go:117] "RemoveContainer" containerID="bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15" Dec 03 16:55:16 crc kubenswrapper[5002]: E1203 16:55:16.839883 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15\": container with ID starting with bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15 not found: ID does not exist" containerID="bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.839934 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15"} err="failed to get container status \"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15\": rpc error: code = NotFound desc = could not find container \"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15\": container with ID starting with bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15 not found: ID does not exist" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.839970 5002 scope.go:117] "RemoveContainer" containerID="6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8" Dec 03 16:55:16 crc kubenswrapper[5002]: E1203 16:55:16.840277 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8\": container with ID starting with 6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8 not found: ID does not exist" containerID="6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.840310 5002 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8"} err="failed to get container status \"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8\": rpc error: code = NotFound desc = could not find container \"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8\": container with ID starting with 6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8 not found: ID does not exist" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.840336 5002 scope.go:117] "RemoveContainer" containerID="bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.840587 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15"} err="failed to get container status \"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15\": rpc error: code = NotFound desc = could not find container \"bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15\": container with ID starting with bd55b62f156b2a0f9373ea09130b333d0d90d05b0abfafa0eb4ac75cf3655b15 not found: ID does not exist" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.840615 5002 scope.go:117] "RemoveContainer" containerID="6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.840914 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8"} err="failed to get container status \"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8\": rpc error: code = NotFound desc = could not find container \"6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8\": container with ID starting with 6350b97d4153e4f2dc55de5717868c71caeba9a2048c272d4b87003a81fa51c8 not found: ID does not exist" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.855120 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9hms\" (UniqueName: \"kubernetes.io/projected/ff796baf-5509-457b-b651-8f6cd700e526-kube-api-access-m9hms\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.855151 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.855161 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.855171 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:16 crc kubenswrapper[5002]: I1203 16:55:16.855182 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff796baf-5509-457b-b651-8f6cd700e526-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.119987 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 
16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.131557 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.178120 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.179278 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fd124dd-d66f-4420-a183-383fc6c2ea02" containerName="nova-manage" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179324 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fd124dd-d66f-4420-a183-383fc6c2ea02" containerName="nova-manage" Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.179371 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerName="dnsmasq-dns" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179383 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerName="dnsmasq-dns" Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.179399 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerName="init" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179411 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerName="init" Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.179437 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-log" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179448 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-log" Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.179470 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-api" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179481 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-api" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179823 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fd124dd-d66f-4420-a183-383fc6c2ea02" containerName="nova-manage" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179860 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ca5d77b-a995-41c0-ba36-54a896f63a7a" containerName="dnsmasq-dns" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179873 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-api" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.179911 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff796baf-5509-457b-b651-8f6cd700e526" containerName="nova-api-log" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.181895 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.184095 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.185524 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.186453 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.212937 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.261912 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.261964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbv2k\" (UniqueName: \"kubernetes.io/projected/b576f304-fd7e-419e-937c-dafaf1c00970-kube-api-access-xbv2k\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.261983 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b576f304-fd7e-419e-937c-dafaf1c00970-logs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.262033 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-public-tls-certs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.262126 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.262416 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-config-data\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.363204 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-public-tls-certs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.363250 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-internal-tls-certs\") pod 
\"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.363314 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-config-data\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.363357 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.363384 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbv2k\" (UniqueName: \"kubernetes.io/projected/b576f304-fd7e-419e-937c-dafaf1c00970-kube-api-access-xbv2k\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.364088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b576f304-fd7e-419e-937c-dafaf1c00970-logs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.364458 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b576f304-fd7e-419e-937c-dafaf1c00970-logs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.368462 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-public-tls-certs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.368796 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.370001 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.370531 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-config-data\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.380288 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbv2k\" (UniqueName: \"kubernetes.io/projected/b576f304-fd7e-419e-937c-dafaf1c00970-kube-api-access-xbv2k\") pod \"nova-api-0\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " 
pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: I1203 16:55:17.506777 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.540160 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.542821 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.544725 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:55:17 crc kubenswrapper[5002]: E1203 16:55:17.544779 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" containerName="nova-scheduler-scheduler" Dec 03 16:55:18 crc kubenswrapper[5002]: I1203 16:55:18.000063 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:55:18 crc kubenswrapper[5002]: I1203 16:55:18.827047 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b576f304-fd7e-419e-937c-dafaf1c00970","Type":"ContainerStarted","Data":"a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2"} Dec 03 16:55:18 crc kubenswrapper[5002]: I1203 16:55:18.827392 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b576f304-fd7e-419e-937c-dafaf1c00970","Type":"ContainerStarted","Data":"b860ed590860a3a1a54cd9b7b0258210275e0aeba1d0db0458d40dced1fb66a7"} Dec 03 16:55:18 crc kubenswrapper[5002]: I1203 16:55:18.827406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b576f304-fd7e-419e-937c-dafaf1c00970","Type":"ContainerStarted","Data":"7ecdd1c601083ca7243db68655892feaf80e4f21bb99ba10c65f800bc452a956"} Dec 03 16:55:18 crc kubenswrapper[5002]: I1203 16:55:18.860047 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff796baf-5509-457b-b651-8f6cd700e526" path="/var/lib/kubelet/pods/ff796baf-5509-457b-b651-8f6cd700e526/volumes" Dec 03 16:55:18 crc kubenswrapper[5002]: I1203 16:55:18.868957 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.868934332 podStartE2EDuration="1.868934332s" podCreationTimestamp="2025-12-03 16:55:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:55:18.850911812 +0000 UTC m=+1442.264733740" watchObservedRunningTime="2025-12-03 
16:55:18.868934332 +0000 UTC m=+1442.282756220" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.707911 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.742913 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46f912d8-a71e-4c36-93cd-7dbab1c9558a-logs\") pod \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.743023 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-nova-metadata-tls-certs\") pod \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.743264 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-config-data\") pod \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.743326 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-combined-ca-bundle\") pod \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.743552 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mp2j\" (UniqueName: \"kubernetes.io/projected/46f912d8-a71e-4c36-93cd-7dbab1c9558a-kube-api-access-6mp2j\") pod \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\" (UID: \"46f912d8-a71e-4c36-93cd-7dbab1c9558a\") " Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.745685 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46f912d8-a71e-4c36-93cd-7dbab1c9558a-logs" (OuterVolumeSpecName: "logs") pod "46f912d8-a71e-4c36-93cd-7dbab1c9558a" (UID: "46f912d8-a71e-4c36-93cd-7dbab1c9558a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.754574 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46f912d8-a71e-4c36-93cd-7dbab1c9558a-kube-api-access-6mp2j" (OuterVolumeSpecName: "kube-api-access-6mp2j") pod "46f912d8-a71e-4c36-93cd-7dbab1c9558a" (UID: "46f912d8-a71e-4c36-93cd-7dbab1c9558a"). InnerVolumeSpecName "kube-api-access-6mp2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.791831 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-config-data" (OuterVolumeSpecName: "config-data") pod "46f912d8-a71e-4c36-93cd-7dbab1c9558a" (UID: "46f912d8-a71e-4c36-93cd-7dbab1c9558a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.814409 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46f912d8-a71e-4c36-93cd-7dbab1c9558a" (UID: "46f912d8-a71e-4c36-93cd-7dbab1c9558a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.820466 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "46f912d8-a71e-4c36-93cd-7dbab1c9558a" (UID: "46f912d8-a71e-4c36-93cd-7dbab1c9558a"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.846133 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.846171 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.846188 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mp2j\" (UniqueName: \"kubernetes.io/projected/46f912d8-a71e-4c36-93cd-7dbab1c9558a-kube-api-access-6mp2j\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.846203 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46f912d8-a71e-4c36-93cd-7dbab1c9558a-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.846215 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/46f912d8-a71e-4c36-93cd-7dbab1c9558a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.847335 5002 generic.go:334] "Generic (PLEG): container finished" podID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerID="2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9" exitCode=0 Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.847453 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"46f912d8-a71e-4c36-93cd-7dbab1c9558a","Type":"ContainerDied","Data":"2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9"} Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.847544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"46f912d8-a71e-4c36-93cd-7dbab1c9558a","Type":"ContainerDied","Data":"805ddf530c513a3ef8319cc1d92dec2338143dbe2199612d4a7ab671932d9e4d"} Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.847570 5002 scope.go:117] "RemoveContainer" containerID="2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.847483 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.895779 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.903109 5002 scope.go:117] "RemoveContainer" containerID="0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.918363 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.931548 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:55:19 crc kubenswrapper[5002]: E1203 16:55:19.931987 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-metadata" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.932010 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-metadata" Dec 03 16:55:19 crc kubenswrapper[5002]: E1203 16:55:19.932053 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-log" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.932062 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-log" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.932317 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-metadata" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.932336 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" containerName="nova-metadata-log" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.933073 5002 scope.go:117] "RemoveContainer" containerID="2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9" Dec 03 16:55:19 crc kubenswrapper[5002]: E1203 16:55:19.933657 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9\": container with ID starting with 2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9 not found: ID does not exist" containerID="2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.933714 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9"} err="failed to get container status \"2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9\": rpc error: code = NotFound desc = could not find container \"2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9\": container with ID starting with 2c469e6ace08c9fc6a00bd8768cc90553d25cb6e9506a1a603923659f52debc9 not found: ID does not exist" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.933761 5002 scope.go:117] "RemoveContainer" containerID="0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12" Dec 03 16:55:19 crc kubenswrapper[5002]: E1203 16:55:19.934248 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12\": container with ID starting with 0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12 not found: ID does not exist" containerID="0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.934313 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12"} err="failed to get container status \"0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12\": rpc error: code = NotFound desc = could not find container \"0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12\": container with ID starting with 0e287aed2797dd2cbdf0206284ae7371c37ffe82956cf40724ab0cce71becd12 not found: ID does not exist" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.936715 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.940878 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.944395 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 16:55:19 crc kubenswrapper[5002]: I1203 16:55:19.964150 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.051172 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e408ec9-5662-43a6-93fd-9fa7a60f98db-logs\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.051270 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-896gt\" (UniqueName: \"kubernetes.io/projected/4e408ec9-5662-43a6-93fd-9fa7a60f98db-kube-api-access-896gt\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.051340 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-config-data\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.051435 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.051564 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.153949 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.154034 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e408ec9-5662-43a6-93fd-9fa7a60f98db-logs\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.154067 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-896gt\" (UniqueName: \"kubernetes.io/projected/4e408ec9-5662-43a6-93fd-9fa7a60f98db-kube-api-access-896gt\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.154190 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-config-data\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.154339 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.155054 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e408ec9-5662-43a6-93fd-9fa7a60f98db-logs\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.160706 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.160821 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-config-data\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.160998 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.172570 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-896gt\" (UniqueName: \"kubernetes.io/projected/4e408ec9-5662-43a6-93fd-9fa7a60f98db-kube-api-access-896gt\") pod \"nova-metadata-0\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 
16:55:20.263161 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.832996 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.869356 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46f912d8-a71e-4c36-93cd-7dbab1c9558a" path="/var/lib/kubelet/pods/46f912d8-a71e-4c36-93cd-7dbab1c9558a/volumes" Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.870630 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4e408ec9-5662-43a6-93fd-9fa7a60f98db","Type":"ContainerStarted","Data":"bbe2261e50861c45e2046d54d52377830a569545ea180eed78bd4c9bab1a1985"} Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.916683 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:55:20 crc kubenswrapper[5002]: I1203 16:55:20.916861 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.791594 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.876909 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4e408ec9-5662-43a6-93fd-9fa7a60f98db","Type":"ContainerStarted","Data":"a31812f2b35f2e5582ee415727310457b192635bb15360c9e32d188a23358eea"} Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.876976 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4e408ec9-5662-43a6-93fd-9fa7a60f98db","Type":"ContainerStarted","Data":"5e6958690a01bcd45b235c0f78e4750c3aa535d1ce524a8ad09dfce393a6bba0"} Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.879318 5002 generic.go:334] "Generic (PLEG): container finished" podID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" exitCode=0 Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.879375 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7fdf904-293d-4be8-a8ea-01aba8494aa5","Type":"ContainerDied","Data":"225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596"} Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.879479 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7fdf904-293d-4be8-a8ea-01aba8494aa5","Type":"ContainerDied","Data":"30132d31aca3887e47f7869b8050ef84f5ea73786f4176e62073dbbb358aa392"} Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.879427 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.879506 5002 scope.go:117] "RemoveContainer" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.886186 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-combined-ca-bundle\") pod \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.886483 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9kbt\" (UniqueName: \"kubernetes.io/projected/e7fdf904-293d-4be8-a8ea-01aba8494aa5-kube-api-access-p9kbt\") pod \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.886590 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-config-data\") pod \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\" (UID: \"e7fdf904-293d-4be8-a8ea-01aba8494aa5\") " Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.892644 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7fdf904-293d-4be8-a8ea-01aba8494aa5-kube-api-access-p9kbt" (OuterVolumeSpecName: "kube-api-access-p9kbt") pod "e7fdf904-293d-4be8-a8ea-01aba8494aa5" (UID: "e7fdf904-293d-4be8-a8ea-01aba8494aa5"). InnerVolumeSpecName "kube-api-access-p9kbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.914455 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.914430537 podStartE2EDuration="2.914430537s" podCreationTimestamp="2025-12-03 16:55:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:55:21.904830726 +0000 UTC m=+1445.318652634" watchObservedRunningTime="2025-12-03 16:55:21.914430537 +0000 UTC m=+1445.328252445" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.936036 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-config-data" (OuterVolumeSpecName: "config-data") pod "e7fdf904-293d-4be8-a8ea-01aba8494aa5" (UID: "e7fdf904-293d-4be8-a8ea-01aba8494aa5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.939640 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7fdf904-293d-4be8-a8ea-01aba8494aa5" (UID: "e7fdf904-293d-4be8-a8ea-01aba8494aa5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.971614 5002 scope.go:117] "RemoveContainer" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" Dec 03 16:55:21 crc kubenswrapper[5002]: E1203 16:55:21.972093 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596\": container with ID starting with 225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596 not found: ID does not exist" containerID="225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.972131 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596"} err="failed to get container status \"225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596\": rpc error: code = NotFound desc = could not find container \"225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596\": container with ID starting with 225fcc1c4326eaaa36333ac940f1b7c8b27b8209c94c16c692687df4f322d596 not found: ID does not exist" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.992433 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.992504 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9kbt\" (UniqueName: \"kubernetes.io/projected/e7fdf904-293d-4be8-a8ea-01aba8494aa5-kube-api-access-p9kbt\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:21 crc kubenswrapper[5002]: I1203 16:55:21.992521 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7fdf904-293d-4be8-a8ea-01aba8494aa5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.244420 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.258482 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.270436 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:55:22 crc kubenswrapper[5002]: E1203 16:55:22.271320 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" containerName="nova-scheduler-scheduler" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.271375 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" containerName="nova-scheduler-scheduler" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.271944 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" containerName="nova-scheduler-scheduler" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.273458 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.276894 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.281819 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.300160 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whdtn\" (UniqueName: \"kubernetes.io/projected/86921476-d5b9-4fc0-86d1-aa82dd931e5f-kube-api-access-whdtn\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.300285 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.300346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-config-data\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.402225 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whdtn\" (UniqueName: \"kubernetes.io/projected/86921476-d5b9-4fc0-86d1-aa82dd931e5f-kube-api-access-whdtn\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.402676 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.403008 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-config-data\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.408084 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.409773 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-config-data\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.420090 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whdtn\" (UniqueName: 
\"kubernetes.io/projected/86921476-d5b9-4fc0-86d1-aa82dd931e5f-kube-api-access-whdtn\") pod \"nova-scheduler-0\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.642087 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:55:22 crc kubenswrapper[5002]: I1203 16:55:22.856792 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7fdf904-293d-4be8-a8ea-01aba8494aa5" path="/var/lib/kubelet/pods/e7fdf904-293d-4be8-a8ea-01aba8494aa5/volumes" Dec 03 16:55:23 crc kubenswrapper[5002]: I1203 16:55:23.178345 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:55:23 crc kubenswrapper[5002]: W1203 16:55:23.182996 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86921476_d5b9_4fc0_86d1_aa82dd931e5f.slice/crio-bb3ce16779002af79f9743393a59d7c153b54aea52b2c39b5dbb2e3c6ca54100 WatchSource:0}: Error finding container bb3ce16779002af79f9743393a59d7c153b54aea52b2c39b5dbb2e3c6ca54100: Status 404 returned error can't find the container with id bb3ce16779002af79f9743393a59d7c153b54aea52b2c39b5dbb2e3c6ca54100 Dec 03 16:55:23 crc kubenswrapper[5002]: I1203 16:55:23.907923 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86921476-d5b9-4fc0-86d1-aa82dd931e5f","Type":"ContainerStarted","Data":"9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b"} Dec 03 16:55:23 crc kubenswrapper[5002]: I1203 16:55:23.908388 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86921476-d5b9-4fc0-86d1-aa82dd931e5f","Type":"ContainerStarted","Data":"bb3ce16779002af79f9743393a59d7c153b54aea52b2c39b5dbb2e3c6ca54100"} Dec 03 16:55:23 crc kubenswrapper[5002]: I1203 16:55:23.946595 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.9465645660000002 podStartE2EDuration="1.946564566s" podCreationTimestamp="2025-12-03 16:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:55:23.93163639 +0000 UTC m=+1447.345458328" watchObservedRunningTime="2025-12-03 16:55:23.946564566 +0000 UTC m=+1447.360386484" Dec 03 16:55:25 crc kubenswrapper[5002]: I1203 16:55:25.263624 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:55:25 crc kubenswrapper[5002]: I1203 16:55:25.264101 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:55:27 crc kubenswrapper[5002]: I1203 16:55:27.507581 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:55:27 crc kubenswrapper[5002]: I1203 16:55:27.507649 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:55:27 crc kubenswrapper[5002]: I1203 16:55:27.642225 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 16:55:28 crc kubenswrapper[5002]: I1203 16:55:28.523965 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-log" probeResult="failure" 
output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:55:28 crc kubenswrapper[5002]: I1203 16:55:28.524444 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:55:30 crc kubenswrapper[5002]: I1203 16:55:30.264404 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 16:55:30 crc kubenswrapper[5002]: I1203 16:55:30.264886 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 16:55:31 crc kubenswrapper[5002]: I1203 16:55:31.281959 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:55:31 crc kubenswrapper[5002]: I1203 16:55:31.281987 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:55:32 crc kubenswrapper[5002]: I1203 16:55:32.642877 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 16:55:32 crc kubenswrapper[5002]: I1203 16:55:32.693514 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 16:55:33 crc kubenswrapper[5002]: I1203 16:55:33.050820 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 16:55:35 crc kubenswrapper[5002]: I1203 16:55:35.318958 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 16:55:37 crc kubenswrapper[5002]: I1203 16:55:37.514284 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:55:37 crc kubenswrapper[5002]: I1203 16:55:37.515180 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:55:37 crc kubenswrapper[5002]: I1203 16:55:37.516855 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:55:37 crc kubenswrapper[5002]: I1203 16:55:37.523481 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:55:38 crc kubenswrapper[5002]: I1203 16:55:38.239991 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:55:38 crc kubenswrapper[5002]: I1203 16:55:38.252110 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:55:40 crc kubenswrapper[5002]: I1203 16:55:40.269628 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 16:55:40 crc kubenswrapper[5002]: I1203 16:55:40.270997 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-metadata-0" Dec 03 16:55:40 crc kubenswrapper[5002]: I1203 16:55:40.275035 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 16:55:40 crc kubenswrapper[5002]: I1203 16:55:40.275233 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 16:55:50 crc kubenswrapper[5002]: I1203 16:55:50.918225 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:55:50 crc kubenswrapper[5002]: I1203 16:55:50.919357 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:55:50 crc kubenswrapper[5002]: I1203 16:55:50.919429 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:55:50 crc kubenswrapper[5002]: I1203 16:55:50.920482 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"499c319fe6708e676b3e6316237c177a723be8309f300373241f6343d1f6ef57"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:55:50 crc kubenswrapper[5002]: I1203 16:55:50.920565 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://499c319fe6708e676b3e6316237c177a723be8309f300373241f6343d1f6ef57" gracePeriod=600 Dec 03 16:55:51 crc kubenswrapper[5002]: I1203 16:55:51.419192 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="499c319fe6708e676b3e6316237c177a723be8309f300373241f6343d1f6ef57" exitCode=0 Dec 03 16:55:51 crc kubenswrapper[5002]: I1203 16:55:51.419227 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"499c319fe6708e676b3e6316237c177a723be8309f300373241f6343d1f6ef57"} Dec 03 16:55:51 crc kubenswrapper[5002]: I1203 16:55:51.419730 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e"} Dec 03 16:55:51 crc kubenswrapper[5002]: I1203 16:55:51.419784 5002 scope.go:117] "RemoveContainer" containerID="80bed6e41a955a593b74ebe3d33480022d2c94cec0b0862556f213fb12fa5abe" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.033137 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.033982 5002 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-cell1-conductor-0" podUID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" containerName="nova-cell1-conductor-conductor" containerID="cri-o://66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.120149 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.120584 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="922c692b-3d5c-45df-862d-d4e08b06fe0b" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b7977fd2fc849375b5f24a1f92f0acd219b3d1bff05f0b2a40d3322e005c9013" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.339823 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.340066 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="1823be31-afb8-4085-a9a1-f1b75c65f3a2" containerName="openstackclient" containerID="cri-o://d908e5d007ac194ec3ebbceec4f006b453c1be2fbf1833110a61991b4704e296" gracePeriod=2 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.355387 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.436811 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.437992 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="openstack-network-exporter" containerID="cri-o://9fb4d5ac070c404dafdfc058deb933ce0850d4c297f125e77eac745370a46ed9" gracePeriod=300 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.514442 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.644815 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.645207 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="cinder-scheduler" containerID="cri-o://4135f456101b8d8bc1d6aa3e05e38d4e5f0009555d8fd482836038f10a45877a" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.645833 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="probe" containerID="cri-o://4bc8f24bf14262c55e9f63c9738562230246183c67d42270335aee3f4e625213" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.696853 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glancea518-account-delete-tg6nl"] Dec 03 16:55:58 crc kubenswrapper[5002]: E1203 16:55:58.697351 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1823be31-afb8-4085-a9a1-f1b75c65f3a2" containerName="openstackclient" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.697378 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1823be31-afb8-4085-a9a1-f1b75c65f3a2" containerName="openstackclient" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 
16:55:58.697613 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1823be31-afb8-4085-a9a1-f1b75c65f3a2" containerName="openstackclient" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.698386 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:58 crc kubenswrapper[5002]: E1203 16:55:58.746119 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 03 16:55:58 crc kubenswrapper[5002]: E1203 16:55:58.746208 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data podName:e7a598f1-0f32-448c-b08f-b5b0e70f583d nodeName:}" failed. No retries permitted until 2025-12-03 16:55:59.246184474 +0000 UTC m=+1482.660006362 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data") pod "rabbitmq-cell1-server-0" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d") : configmap "rabbitmq-cell1-config-data" not found Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.765695 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancea518-account-delete-tg6nl"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.810310 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.811314 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api-log" containerID="cri-o://7289b73f03ab749a5e919fc53efa0fd71d6720a44c71c21cabbcd8b11406b07e" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.811615 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api" containerID="cri-o://2c7e91463c94f3cc50714dbf46be64e0f5e71d87a2dbb56ce0212f820db59c76" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.841655 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placemente6cd-account-delete-wpdgt"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.843093 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.844597 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc3c0d8b-823d-42bc-a114-766494075e59-operator-scripts\") pod \"glancea518-account-delete-tg6nl\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.844663 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpwdm\" (UniqueName: \"kubernetes.io/projected/bc3c0d8b-823d-42bc-a114-766494075e59-kube-api-access-qpwdm\") pod \"glancea518-account-delete-tg6nl\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.847397 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="ovsdbserver-nb" containerID="cri-o://fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b" gracePeriod=300 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.887470 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.887991 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="openstack-network-exporter" containerID="cri-o://19fc75cf1b1a687269a530436d2398a6475ba55eef4ea65b1527d796cfd116c4" gracePeriod=300 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.940468 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placemente6cd-account-delete-wpdgt"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.954499 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc3c0d8b-823d-42bc-a114-766494075e59-operator-scripts\") pod \"glancea518-account-delete-tg6nl\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.954617 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpwdm\" (UniqueName: \"kubernetes.io/projected/bc3c0d8b-823d-42bc-a114-766494075e59-kube-api-access-qpwdm\") pod \"glancea518-account-delete-tg6nl\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.954661 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvzmx\" (UniqueName: \"kubernetes.io/projected/05d0c2be-6525-4ec1-bcae-e240255c970a-kube-api-access-qvzmx\") pod \"placemente6cd-account-delete-wpdgt\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.954724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05d0c2be-6525-4ec1-bcae-e240255c970a-operator-scripts\") pod \"placemente6cd-account-delete-wpdgt\" (UID: 
\"05d0c2be-6525-4ec1-bcae-e240255c970a\") " pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.956520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc3c0d8b-823d-42bc-a114-766494075e59-operator-scripts\") pod \"glancea518-account-delete-tg6nl\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.991080 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.991388 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="ovn-northd" containerID="cri-o://8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39" gracePeriod=30 Dec 03 16:55:58 crc kubenswrapper[5002]: I1203 16:55:58.991547 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="openstack-network-exporter" containerID="cri-o://d01d9155c2a1be73fba2d20cfcf3edb5d0d73c8658491e3d1015b359c087e0ad" gracePeriod=30 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.005654 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-t6r47"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.022998 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-t6r47"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.027414 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpwdm\" (UniqueName: \"kubernetes.io/projected/bc3c0d8b-823d-42bc-a114-766494075e59-kube-api-access-qpwdm\") pod \"glancea518-account-delete-tg6nl\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.055802 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronb96b-account-delete-ttg76"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.057153 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.058570 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvzmx\" (UniqueName: \"kubernetes.io/projected/05d0c2be-6525-4ec1-bcae-e240255c970a-kube-api-access-qvzmx\") pod \"placemente6cd-account-delete-wpdgt\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.058625 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05d0c2be-6525-4ec1-bcae-e240255c970a-operator-scripts\") pod \"placemente6cd-account-delete-wpdgt\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.059391 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05d0c2be-6525-4ec1-bcae-e240255c970a-operator-scripts\") pod \"placemente6cd-account-delete-wpdgt\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.067224 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronb96b-account-delete-ttg76"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.076724 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="ovsdbserver-sb" containerID="cri-o://104487232d5020db87327b0c923d164a5647072cdec65c4bd2d6deffa890a181" gracePeriod=300 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.098311 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.117517 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvzmx\" (UniqueName: \"kubernetes.io/projected/05d0c2be-6525-4ec1-bcae-e240255c970a-kube-api-access-qvzmx\") pod \"placemente6cd-account-delete-wpdgt\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.160028 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13c5aef5-d2f5-4449-8cce-125cdf61d06b-operator-scripts\") pod \"neutronb96b-account-delete-ttg76\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.160202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmcjw\" (UniqueName: \"kubernetes.io/projected/13c5aef5-d2f5-4449-8cce-125cdf61d06b-kube-api-access-bmcjw\") pod \"neutronb96b-account-delete-ttg76\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.202284 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.263656 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmcjw\" (UniqueName: \"kubernetes.io/projected/13c5aef5-d2f5-4449-8cce-125cdf61d06b-kube-api-access-bmcjw\") pod \"neutronb96b-account-delete-ttg76\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.263883 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13c5aef5-d2f5-4449-8cce-125cdf61d06b-operator-scripts\") pod \"neutronb96b-account-delete-ttg76\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.268142 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13c5aef5-d2f5-4449-8cce-125cdf61d06b-operator-scripts\") pod \"neutronb96b-account-delete-ttg76\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: E1203 16:55:59.268689 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 03 16:55:59 crc kubenswrapper[5002]: E1203 16:55:59.268734 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data podName:e7a598f1-0f32-448c-b08f-b5b0e70f583d nodeName:}" failed. No retries permitted until 2025-12-03 16:56:00.268719919 +0000 UTC m=+1483.682541807 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data") pod "rabbitmq-cell1-server-0" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d") : configmap "rabbitmq-cell1-config-data" not found Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.344703 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican8d7a-account-delete-fx9r7"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.346933 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.376526 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmcjw\" (UniqueName: \"kubernetes.io/projected/13c5aef5-d2f5-4449-8cce-125cdf61d06b-kube-api-access-bmcjw\") pod \"neutronb96b-account-delete-ttg76\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.445675 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-pk54x"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.473215 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9922091-0d6f-44cf-9b98-8b97a811ea26-operator-scripts\") pod \"barbican8d7a-account-delete-fx9r7\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.473286 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf66x\" (UniqueName: \"kubernetes.io/projected/a9922091-0d6f-44cf-9b98-8b97a811ea26-kube-api-access-mf66x\") pod \"barbican8d7a-account-delete-fx9r7\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.504262 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-s42fp"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.575883 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9922091-0d6f-44cf-9b98-8b97a811ea26-operator-scripts\") pod \"barbican8d7a-account-delete-fx9r7\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.575948 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf66x\" (UniqueName: \"kubernetes.io/projected/a9922091-0d6f-44cf-9b98-8b97a811ea26-kube-api-access-mf66x\") pod \"barbican8d7a-account-delete-fx9r7\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.577122 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9922091-0d6f-44cf-9b98-8b97a811ea26-operator-scripts\") pod \"barbican8d7a-account-delete-fx9r7\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.615531 5002 generic.go:334] "Generic (PLEG): container finished" podID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerID="19fc75cf1b1a687269a530436d2398a6475ba55eef4ea65b1527d796cfd116c4" exitCode=2 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.615641 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9eacefa0-a1f4-4181-ab8e-201efd0fc67e","Type":"ContainerDied","Data":"19fc75cf1b1a687269a530436d2398a6475ba55eef4ea65b1527d796cfd116c4"} Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.623693 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/swift-ring-rebalance-zcdkn"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.633582 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2f69c54d-bd52-413b-86b6-6b5c4ca765ba/ovsdbserver-nb/0.log" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.633627 5002 generic.go:334] "Generic (PLEG): container finished" podID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerID="9fb4d5ac070c404dafdfc058deb933ce0850d4c297f125e77eac745370a46ed9" exitCode=2 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.633656 5002 generic.go:334] "Generic (PLEG): container finished" podID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerID="fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b" exitCode=143 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.633715 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2f69c54d-bd52-413b-86b6-6b5c4ca765ba","Type":"ContainerDied","Data":"9fb4d5ac070c404dafdfc058deb933ce0850d4c297f125e77eac745370a46ed9"} Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.633823 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2f69c54d-bd52-413b-86b6-6b5c4ca765ba","Type":"ContainerDied","Data":"fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b"} Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.637991 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf66x\" (UniqueName: \"kubernetes.io/projected/a9922091-0d6f-44cf-9b98-8b97a811ea26-kube-api-access-mf66x\") pod \"barbican8d7a-account-delete-fx9r7\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.670565 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.690199 5002 generic.go:334] "Generic (PLEG): container finished" podID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerID="7289b73f03ab749a5e919fc53efa0fd71d6720a44c71c21cabbcd8b11406b07e" exitCode=143 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.690287 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965b05ab-f8e9-485e-9f15-2160a598d8c2","Type":"ContainerDied","Data":"7289b73f03ab749a5e919fc53efa0fd71d6720a44c71c21cabbcd8b11406b07e"} Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.715003 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-pk54x"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.726076 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.736927 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-f6jrb"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.755612 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican8d7a-account-delete-fx9r7"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.767228 5002 generic.go:334] "Generic (PLEG): container finished" podID="29a68818-9346-4437-9527-aea9383c1a25" containerID="d01d9155c2a1be73fba2d20cfcf3edb5d0d73c8658491e3d1015b359c087e0ad" exitCode=2 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.767568 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29a68818-9346-4437-9527-aea9383c1a25","Type":"ContainerDied","Data":"d01d9155c2a1be73fba2d20cfcf3edb5d0d73c8658491e3d1015b359c087e0ad"} Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.811170 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-7wlqt"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.821704 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-zcdkn"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.829679 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-s42fp"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.847497 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-7wlqt"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.855725 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-f6jrb"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.901869 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.902321 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-log" containerID="cri-o://37c6c1754413201dda18005715d872051feb0d1aef179748ad24d74f8d9e9696" gracePeriod=30 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.902480 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-httpd" containerID="cri-o://d8f143f4c81641c282592b60722c3b70b5b045ac7605f798b8c288b1a27c879f" gracePeriod=30 Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.922979 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder5ccf-account-delete-j49bd"] Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.924253 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:55:59 crc kubenswrapper[5002]: I1203 16:55:59.939822 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder5ccf-account-delete-j49bd"] Dec 03 16:55:59 crc kubenswrapper[5002]: E1203 16:55:59.975954 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b is running failed: container process not found" containerID="fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 03 16:55:59 crc kubenswrapper[5002]: E1203 16:55:59.976313 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b is running failed: container process not found" containerID="fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 03 16:55:59 crc kubenswrapper[5002]: E1203 16:55:59.976480 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b is running failed: container process not found" containerID="fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 03 16:55:59 crc kubenswrapper[5002]: E1203 16:55:59.976505 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="ovsdbserver-nb" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.006397 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26lhf\" (UniqueName: \"kubernetes.io/projected/de753fc7-23ae-4680-9d4c-11e5632d749d-kube-api-access-26lhf\") pod \"cinder5ccf-account-delete-j49bd\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.006467 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de753fc7-23ae-4680-9d4c-11e5632d749d-operator-scripts\") pod \"cinder5ccf-account-delete-j49bd\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.108881 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26lhf\" (UniqueName: \"kubernetes.io/projected/de753fc7-23ae-4680-9d4c-11e5632d749d-kube-api-access-26lhf\") pod \"cinder5ccf-account-delete-j49bd\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.109239 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de753fc7-23ae-4680-9d4c-11e5632d749d-operator-scripts\") pod 
\"cinder5ccf-account-delete-j49bd\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.127451 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de753fc7-23ae-4680-9d4c-11e5632d749d-operator-scripts\") pod \"cinder5ccf-account-delete-j49bd\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.135340 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26lhf\" (UniqueName: \"kubernetes.io/projected/de753fc7-23ae-4680-9d4c-11e5632d749d-kube-api-access-26lhf\") pod \"cinder5ccf-account-delete-j49bd\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.179006 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.236238 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6495d47864-tf6dm"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.237049 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-6495d47864-tf6dm" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-log" containerID="cri-o://baf09ec78f1fa62fb965a5e1d80a324a472bde92f7bf1614c38cde3288421bd3" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.237214 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-6495d47864-tf6dm" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-api" containerID="cri-o://02d099c7b5a52a32907ff960d5be05cd37532b1c4db658f445b99342fec6b927" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.278152 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.278716 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-server" containerID="cri-o://fd82b64468e87bb52951423d662c96298a04577e50fb7dfae08cf95f6cb95f60" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279061 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-updater" containerID="cri-o://90c36b07d1cc9b89cddb1a2322982944e4e056074c328fde3c02146dc0e50229" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279195 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="swift-recon-cron" containerID="cri-o://b8210908d5d09bcdac92e5295a766ab5588450583f98221265c5dc1fcef219e0" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279226 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-auditor" containerID="cri-o://74b9cad1cdf521c7bfb58575456d72fa698d2f033219c2bcb6eeb10f75b16c25" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: 
I1203 16:56:00.279245 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-expirer" containerID="cri-o://2259d0bf57741cf43caa6dace1c5a1419cb7906850811728b72c40313b3bf897" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279233 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="rsync" containerID="cri-o://51ef41e140012493084ca9a5ee4771bb67457963ca1eb5c801a48e1b0525b81d" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279326 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-replicator" containerID="cri-o://a95935c6703da840b357416ca976f8c112b4e33b4bab1975af78cf849c48467d" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279376 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-server" containerID="cri-o://a869513d51677c3c369edfd440f7ae36fb809c1de1a7e02989a558f03d0af6af" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279419 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-reaper" containerID="cri-o://4e9275333706b76f736d2575ea8bf99de9ae2e8e214e70763b642bd4b982174e" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279469 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-auditor" containerID="cri-o://8a698534e20e33c772ad47cef7ba71e2699abcc80985a3e4b85e7e699d61e5bc" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279515 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-replicator" containerID="cri-o://295d415111976ea10f436b97bb0e928bbba6fc843889cfdd2399f44adfc4cf57" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279695 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-replicator" containerID="cri-o://56b7bbd59ef17d3e48ad12ed59f364881c2e4bfa9e7ece40383f0ef190962e10" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279820 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-auditor" containerID="cri-o://818f3df7318a7e5d0b01bd79d58ad702a7eb69ce025a94d68c50fb54ed6f4b7d" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279882 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-server" containerID="cri-o://9d9bb1df438ed50da59ac696f69efde7c6a48d1828bcb3dd1620fd321d2b4d34" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.279278 5002 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-updater" containerID="cri-o://14c3879759c2b66a2378417de3744de65dc49d534bafb30583646388375fa453" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: W1203 16:56:00.308850 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc3c0d8b_823d_42bc_a114_766494075e59.slice/crio-dd0d02cf94b6e3f87153b6254c77ead3fc37774780df2141f856c67f4f13fec2 WatchSource:0}: Error finding container dd0d02cf94b6e3f87153b6254c77ead3fc37774780df2141f856c67f4f13fec2: Status 404 returned error can't find the container with id dd0d02cf94b6e3f87153b6254c77ead3fc37774780df2141f856c67f4f13fec2 Dec 03 16:56:00 crc kubenswrapper[5002]: E1203 16:56:00.315117 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 03 16:56:00 crc kubenswrapper[5002]: E1203 16:56:00.315333 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data podName:e7a598f1-0f32-448c-b08f-b5b0e70f583d nodeName:}" failed. No retries permitted until 2025-12-03 16:56:02.315317299 +0000 UTC m=+1485.729139177 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data") pod "rabbitmq-cell1-server-0" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d") : configmap "rabbitmq-cell1-config-data" not found Dec 03 16:56:00 crc kubenswrapper[5002]: E1203 16:56:00.319059 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 03 16:56:00 crc kubenswrapper[5002]: E1203 16:56:00.319390 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data podName:382d6556-c45b-43dd-a4fa-16b3e91e0725 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:00.81937845 +0000 UTC m=+1484.233200338 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data") pod "rabbitmq-server-0" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725") : configmap "rabbitmq-config-data" not found Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.332152 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-qnvnc"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.332402 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-qnvnc" podUID="9f59bead-66d7-4fcb-842f-e573fcadcf1f" containerName="openstack-network-exporter" containerID="cri-o://2f14606baed65302b953c10a81a0db167dba604320e45146e5757544b0bb9b0c" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.386214 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-jkwrt"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.428245 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.437798 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hnkdk"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.531958 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9cbcb645-dw9nm"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.532261 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerName="dnsmasq-dns" containerID="cri-o://58c1385bcef3302471d6a081ef5d49065e260a3904a830edb4066b5c487279af" gracePeriod=10 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.554478 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-68bfc56b4f-vnlr5"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.554875 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-68bfc56b4f-vnlr5" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-api" containerID="cri-o://483b4040379f35157fa205bb3c7495c4b0ffa2ae303d246e906b2954e53e03f5" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.555464 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-68bfc56b4f-vnlr5" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-httpd" containerID="cri-o://7319e26425e43fd3866431755e6e1859112eae7b40a15901924150e6da469063" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.571070 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-cfw2x"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.589733 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.590218 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-log" containerID="cri-o://c338030fdc0b9dd2210114f065d62d29e7ab549361d1a0383112a39e58c2dc38" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.590418 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-httpd" containerID="cri-o://344937d693aca613f5d6c7658a05a5e864b67f0cdbdd5fe03d3655235754d316" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.616382 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-pltf4"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.644339 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-cfw2x"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.672665 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-pltf4"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.684433 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-df99c8b7f-mljz8"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.684676 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-df99c8b7f-mljz8" 
podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-httpd" containerID="cri-o://e81a6b5b2d06af54cddc4be90e4d4a13ac27bedc2032aaec6cf75ef0057328dd" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.685142 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-df99c8b7f-mljz8" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-server" containerID="cri-o://9d77ab2291660608c77e2f3623a3656df2f34d2c652d9964617af8f0a234719c" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.695390 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi8fda-account-delete-h8c2d"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.697038 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.708620 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi8fda-account-delete-h8c2d"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.734837 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell0be01-account-delete-r7zk7"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.736316 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.763028 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0be01-account-delete-r7zk7"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.813505 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5dc7d59f9c-7zhx2"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.813845 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker-log" containerID="cri-o://49cfe11824388fcd621bf5c8c3ab0301531171637a9eff8d8df877c7b70ebe69" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.814601 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker" containerID="cri-o://8a095cbc28aea7f906ad627bf6f7c5227b893239574390ebaaeddf3c3b84026c" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.820933 5002 generic.go:334] "Generic (PLEG): container finished" podID="922c692b-3d5c-45df-862d-d4e08b06fe0b" containerID="b7977fd2fc849375b5f24a1f92f0acd219b3d1bff05f0b2a40d3322e005c9013" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.821093 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"922c692b-3d5c-45df-862d-d4e08b06fe0b","Type":"ContainerDied","Data":"b7977fd2fc849375b5f24a1f92f0acd219b3d1bff05f0b2a40d3322e005c9013"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.825438 5002 generic.go:334] "Generic (PLEG): container finished" podID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerID="c338030fdc0b9dd2210114f065d62d29e7ab549361d1a0383112a39e58c2dc38" exitCode=143 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.829344 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"d105ad9d-fbca-4a0c-b188-a88a363756c2","Type":"ContainerDied","Data":"c338030fdc0b9dd2210114f065d62d29e7ab549361d1a0383112a39e58c2dc38"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.829440 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.834326 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz5r2\" (UniqueName: \"kubernetes.io/projected/721437ce-17b0-434b-9604-197f795ed1d9-kube-api-access-wz5r2\") pod \"novacell0be01-account-delete-r7zk7\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.834364 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts\") pod \"novacell0be01-account-delete-r7zk7\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.834399 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f61f67-5ef5-41a9-8bea-3335115b78e9-operator-scripts\") pod \"novaapi8fda-account-delete-h8c2d\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.834442 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdrx2\" (UniqueName: \"kubernetes.io/projected/33f61f67-5ef5-41a9-8bea-3335115b78e9-kube-api-access-kdrx2\") pod \"novaapi8fda-account-delete-h8c2d\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: E1203 16:56:00.834633 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 03 16:56:00 crc kubenswrapper[5002]: E1203 16:56:00.834686 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data podName:382d6556-c45b-43dd-a4fa-16b3e91e0725 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:01.834668347 +0000 UTC m=+1485.248490235 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data") pod "rabbitmq-server-0" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725") : configmap "rabbitmq-config-data" not found Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849291 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="2259d0bf57741cf43caa6dace1c5a1419cb7906850811728b72c40313b3bf897" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849368 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="14c3879759c2b66a2378417de3744de65dc49d534bafb30583646388375fa453" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849381 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="818f3df7318a7e5d0b01bd79d58ad702a7eb69ce025a94d68c50fb54ed6f4b7d" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849393 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="56b7bbd59ef17d3e48ad12ed59f364881c2e4bfa9e7ece40383f0ef190962e10" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849403 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="90c36b07d1cc9b89cddb1a2322982944e4e056074c328fde3c02146dc0e50229" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849412 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="74b9cad1cdf521c7bfb58575456d72fa698d2f033219c2bcb6eeb10f75b16c25" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849421 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="a95935c6703da840b357416ca976f8c112b4e33b4bab1975af78cf849c48467d" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849430 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="4e9275333706b76f736d2575ea8bf99de9ae2e8e214e70763b642bd4b982174e" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849438 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="8a698534e20e33c772ad47cef7ba71e2699abcc80985a3e4b85e7e699d61e5bc" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.849447 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="295d415111976ea10f436b97bb0e928bbba6fc843889cfdd2399f44adfc4cf57" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.851600 5002 generic.go:334] "Generic (PLEG): container finished" podID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerID="58c1385bcef3302471d6a081ef5d49065e260a3904a830edb4066b5c487279af" exitCode=0 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.886336 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bd61525-e5d2-4258-9ebb-1d0786953372" path="/var/lib/kubelet/pods/2bd61525-e5d2-4258-9ebb-1d0786953372/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.894644 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dddc7b5-9eb0-495e-b81c-45f085598280" 
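[editor's note] The burst of "Killing container with a grace period" entries above uses very different gracePeriod values (10s for dnsmasq, 30s for most services, 604800s for rabbitmq), and the matching "container finished" entries then report the resulting exit codes. The underlying pattern is SIGTERM first, SIGKILL only after the grace period expires. A minimal, self-contained Go sketch of that pattern under stated assumptions (a stand-in child process, not kubelet/CRI-O code):

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // killWithGrace asks the process to stop with SIGTERM, then escalates
    // to SIGKILL if it is still running after the grace period.
    func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()

        cmd.Process.Signal(syscall.SIGTERM) // polite request (exitCode 143 if fatal)
        select {
        case <-done:
            fmt.Println("exited within grace period")
        case <-time.After(grace):
            cmd.Process.Kill() // SIGKILL after the deadline (exitCode 137)
            <-done
            fmt.Println("escalated to SIGKILL")
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60") // illustrative workload
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        killWithGrace(cmd, 2*time.Second)
    }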
path="/var/lib/kubelet/pods/3dddc7b5-9eb0-495e-b81c-45f085598280/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.894712 5002 generic.go:334] "Generic (PLEG): container finished" podID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerID="37c6c1754413201dda18005715d872051feb0d1aef179748ad24d74f8d9e9696" exitCode=143 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.895278 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cfaf87a-9c55-42fa-8083-490f6c936d04" path="/var/lib/kubelet/pods/4cfaf87a-9c55-42fa-8083-490f6c936d04/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.895817 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fd124dd-d66f-4420-a183-383fc6c2ea02" path="/var/lib/kubelet/pods/8fd124dd-d66f-4420-a183-383fc6c2ea02/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.896850 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99fb9387-8126-480b-a909-f826c3ac626d" path="/var/lib/kubelet/pods/99fb9387-8126-480b-a909-f826c3ac626d/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.897888 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6dbbdb3-c51b-4f76-8605-04bcfaa83451" path="/var/lib/kubelet/pods/b6dbbdb3-c51b-4f76-8605-04bcfaa83451/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.898762 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf3779ea-e418-4c90-9c5e-74e0c8590c75" path="/var/lib/kubelet/pods/cf3779ea-e418-4c90-9c5e-74e0c8590c75/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.899349 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b" path="/var/lib/kubelet/pods/db5d53ca-ee4d-4dfa-ad25-97f7efd5f27b/volumes" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900491 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"2259d0bf57741cf43caa6dace1c5a1419cb7906850811728b72c40313b3bf897"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900524 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"14c3879759c2b66a2378417de3744de65dc49d534bafb30583646388375fa453"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900536 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"818f3df7318a7e5d0b01bd79d58ad702a7eb69ce025a94d68c50fb54ed6f4b7d"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900546 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900568 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-598cb58b8b-ww24g"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900581 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"56b7bbd59ef17d3e48ad12ed59f364881c2e4bfa9e7ece40383f0ef190962e10"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"90c36b07d1cc9b89cddb1a2322982944e4e056074c328fde3c02146dc0e50229"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900606 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900619 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"74b9cad1cdf521c7bfb58575456d72fa698d2f033219c2bcb6eeb10f75b16c25"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900627 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"a95935c6703da840b357416ca976f8c112b4e33b4bab1975af78cf849c48467d"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900636 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"4e9275333706b76f736d2575ea8bf99de9ae2e8e214e70763b642bd4b982174e"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"8a698534e20e33c772ad47cef7ba71e2699abcc80985a3e4b85e7e699d61e5bc"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900815 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"295d415111976ea10f436b97bb0e928bbba6fc843889cfdd2399f44adfc4cf57"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900854 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" event={"ID":"34a0fbfb-baac-41c7-8430-cb0e1720dfa5","Type":"ContainerDied","Data":"58c1385bcef3302471d6a081ef5d49065e260a3904a830edb4066b5c487279af"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900864 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener-log" containerID="cri-o://7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900875 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929","Type":"ContainerDied","Data":"37c6c1754413201dda18005715d872051feb0d1aef179748ad24d74f8d9e9696"} Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.900990 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener" containerID="cri-o://58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.902021 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-log" containerID="cri-o://5e6958690a01bcd45b235c0f78e4750c3aa535d1ce524a8ad09dfce393a6bba0" 
gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.902128 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-metadata" containerID="cri-o://a31812f2b35f2e5582ee415727310457b192635bb15360c9e32d188a23358eea" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.904559 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b89c68cbb-nkz44"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.907596 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b89c68cbb-nkz44" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api-log" containerID="cri-o://7782f9e82bd0b16c1ea4af876571de089d43551370a7363494d32a492da6fdb9" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.908080 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b89c68cbb-nkz44" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api" containerID="cri-o://2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.912820 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.913209 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-log" containerID="cri-o://b860ed590860a3a1a54cd9b7b0258210275e0aeba1d0db0458d40dced1fb66a7" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.913392 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-api" containerID="cri-o://a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2" gracePeriod=30 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.925837 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-jqnpw"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.937124 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-jqnpw"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.938297 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz5r2\" (UniqueName: \"kubernetes.io/projected/721437ce-17b0-434b-9604-197f795ed1d9-kube-api-access-wz5r2\") pod \"novacell0be01-account-delete-r7zk7\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.938322 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts\") pod \"novacell0be01-account-delete-r7zk7\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.938356 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f61f67-5ef5-41a9-8bea-3335115b78e9-operator-scripts\") pod \"novaapi8fda-account-delete-h8c2d\" (UID: 
\"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.938408 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdrx2\" (UniqueName: \"kubernetes.io/projected/33f61f67-5ef5-41a9-8bea-3335115b78e9-kube-api-access-kdrx2\") pod \"novaapi8fda-account-delete-h8c2d\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.940446 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts\") pod \"novacell0be01-account-delete-r7zk7\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.943007 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f61f67-5ef5-41a9-8bea-3335115b78e9-operator-scripts\") pod \"novaapi8fda-account-delete-h8c2d\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.970796 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f979-account-create-update-zqkjf"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.971999 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f979-account-create-update-zqkjf"] Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.974648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz5r2\" (UniqueName: \"kubernetes.io/projected/721437ce-17b0-434b-9604-197f795ed1d9-kube-api-access-wz5r2\") pod \"novacell0be01-account-delete-r7zk7\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.990231 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="rabbitmq" containerID="cri-o://d73705cc9fa58d7cab153dcce4358e73807cf1f2ea894bb4e2604b295891a9c0" gracePeriod=604800 Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.990904 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdrx2\" (UniqueName: \"kubernetes.io/projected/33f61f67-5ef5-41a9-8bea-3335115b78e9-kube-api-access-kdrx2\") pod \"novaapi8fda-account-delete-h8c2d\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:00 crc kubenswrapper[5002]: I1203 16:56:00.996804 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancea518-account-delete-tg6nl"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.019161 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"36469a67-4d79-419f-9aaf-a1c128132287","Type":"ContainerDied","Data":"4bc8f24bf14262c55e9f63c9738562230246183c67d42270335aee3f4e625213"} Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.028713 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.010047 5002 generic.go:334] "Generic (PLEG): container 
finished" podID="36469a67-4d79-419f-9aaf-a1c128132287" containerID="4bc8f24bf14262c55e9f63c9738562230246183c67d42270335aee3f4e625213" exitCode=0 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.035093 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.049860 5002 generic.go:334] "Generic (PLEG): container finished" podID="1823be31-afb8-4085-a9a1-f1b75c65f3a2" containerID="d908e5d007ac194ec3ebbceec4f006b453c1be2fbf1833110a61991b4704e296" exitCode=137 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.058108 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.062352 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.062598 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" containerName="nova-cell0-conductor-conductor" containerID="cri-o://fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37" gracePeriod=30 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.103499 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qnvnc_9f59bead-66d7-4fcb-842f-e573fcadcf1f/openstack-network-exporter/0.log" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.103555 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f59bead-66d7-4fcb-842f-e573fcadcf1f" containerID="2f14606baed65302b953c10a81a0db167dba604320e45146e5757544b0bb9b0c" exitCode=2 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.103636 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qnvnc" event={"ID":"9f59bead-66d7-4fcb-842f-e573fcadcf1f","Type":"ContainerDied","Data":"2f14606baed65302b953c10a81a0db167dba604320e45146e5757544b0bb9b0c"} Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.107726 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2f69c54d-bd52-413b-86b6-6b5c4ca765ba/ovsdbserver-nb/0.log" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.107830 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.115831 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jrk64"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.119888 5002 generic.go:334] "Generic (PLEG): container finished" podID="1c099352-abbe-4c3a-9431-c854e5333420" containerID="baf09ec78f1fa62fb965a5e1d80a324a472bde92f7bf1614c38cde3288421bd3" exitCode=143 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.119961 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6495d47864-tf6dm" event={"ID":"1c099352-abbe-4c3a-9431-c854e5333420","Type":"ContainerDied","Data":"baf09ec78f1fa62fb965a5e1d80a324a472bde92f7bf1614c38cde3288421bd3"} Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.125664 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancea518-account-delete-tg6nl" event={"ID":"bc3c0d8b-823d-42bc-a114-766494075e59","Type":"ContainerStarted","Data":"dd0d02cf94b6e3f87153b6254c77ead3fc37774780df2141f856c67f4f13fec2"} Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.139088 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jrk64"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.178141 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9eacefa0-a1f4-4181-ab8e-201efd0fc67e/ovsdbserver-sb/0.log" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.178687 5002 generic.go:334] "Generic (PLEG): container finished" podID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerID="104487232d5020db87327b0c923d164a5647072cdec65c4bd2d6deffa890a181" exitCode=143 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.179053 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9eacefa0-a1f4-4181-ab8e-201efd0fc67e","Type":"ContainerDied","Data":"104487232d5020db87327b0c923d164a5647072cdec65c4bd2d6deffa890a181"} Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.181729 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.210653 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zspw"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.222036 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zspw"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.228857 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.229143 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerName="nova-scheduler-scheduler" containerID="cri-o://9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" gracePeriod=30 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.252495 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" 
containerID="cri-o://226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" gracePeriod=30 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253291 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-metrics-certs-tls-certs\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253324 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-combined-ca-bundle\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253349 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdbserver-nb-tls-certs\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253424 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-scripts\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253532 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-config\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253604 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253630 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gjnr\" (UniqueName: \"kubernetes.io/projected/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-kube-api-access-8gjnr\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.253650 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdb-rundir\") pod \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\" (UID: \"2f69c54d-bd52-413b-86b6-6b5c4ca765ba\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.255542 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.256095 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-scripts" (OuterVolumeSpecName: "scripts") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.256258 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-config" (OuterVolumeSpecName: "config") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.257801 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placemente6cd-account-delete-wpdgt"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.272349 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerName="rabbitmq" containerID="cri-o://4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8" gracePeriod=604800 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.277184 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.291720 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-kube-api-access-8gjnr" (OuterVolumeSpecName: "kube-api-access-8gjnr") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "kube-api-access-8gjnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.349618 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9eacefa0-a1f4-4181-ab8e-201efd0fc67e/ovsdbserver-sb/0.log" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.349680 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.356016 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.356044 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gjnr\" (UniqueName: \"kubernetes.io/projected/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-kube-api-access-8gjnr\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.356054 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.356062 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.356070 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.398724 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.442021 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.457527 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-metrics-certs-tls-certs\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.457586 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-scripts\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.457624 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdbserver-sb-tls-certs\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.457641 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfcv5\" (UniqueName: \"kubernetes.io/projected/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-kube-api-access-bfcv5\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.457657 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-combined-ca-bundle\") pod \"922c692b-3d5c-45df-862d-d4e08b06fe0b\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.457679 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-combined-ca-bundle\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-config\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459075 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459136 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdb-rundir\") pod \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\" (UID: \"9eacefa0-a1f4-4181-ab8e-201efd0fc67e\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459257 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-nova-novncproxy-tls-certs\") pod 
\"922c692b-3d5c-45df-862d-d4e08b06fe0b\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459312 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-vencrypt-tls-certs\") pod \"922c692b-3d5c-45df-862d-d4e08b06fe0b\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459335 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-config-data\") pod \"922c692b-3d5c-45df-862d-d4e08b06fe0b\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.459372 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t44c\" (UniqueName: \"kubernetes.io/projected/922c692b-3d5c-45df-862d-d4e08b06fe0b-kube-api-access-5t44c\") pod \"922c692b-3d5c-45df-862d-d4e08b06fe0b\" (UID: \"922c692b-3d5c-45df-862d-d4e08b06fe0b\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.460091 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.461252 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="galera" containerID="cri-o://79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98" gracePeriod=30 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.462690 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.463536 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-config" (OuterVolumeSpecName: "config") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.465026 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-scripts" (OuterVolumeSpecName: "scripts") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.471094 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.472451 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-kube-api-access-bfcv5" (OuterVolumeSpecName: "kube-api-access-bfcv5") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "kube-api-access-bfcv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.477067 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/922c692b-3d5c-45df-862d-d4e08b06fe0b-kube-api-access-5t44c" (OuterVolumeSpecName: "kube-api-access-5t44c") pod "922c692b-3d5c-45df-862d-d4e08b06fe0b" (UID: "922c692b-3d5c-45df-862d-d4e08b06fe0b"). InnerVolumeSpecName "kube-api-access-5t44c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.493025 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.493090 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 16:56:01 crc kubenswrapper[5002]: E1203 16:56:01.526269 5002 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 03 16:56:01 crc kubenswrapper[5002]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 03 16:56:01 crc kubenswrapper[5002]: + source /usr/local/bin/container-scripts/functions Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNBridge=br-int Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNRemote=tcp:localhost:6642 Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNEncapType=geneve Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNAvailabilityZones= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ EnableChassisAsGateway=true Dec 03 16:56:01 crc kubenswrapper[5002]: ++ PhysicalNetworks= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNHostName= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 03 16:56:01 crc kubenswrapper[5002]: ++ ovs_dir=/var/lib/openvswitch Dec 03 16:56:01 crc kubenswrapper[5002]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 03 16:56:01 crc kubenswrapper[5002]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 03 16:56:01 crc kubenswrapper[5002]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + sleep 0.5 Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + sleep 0.5 Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + cleanup_ovsdb_server_semaphore Dec 03 16:56:01 crc kubenswrapper[5002]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 03 16:56:01 crc kubenswrapper[5002]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 03 16:56:01 crc kubenswrapper[5002]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-jkwrt" message=< Dec 03 16:56:01 crc kubenswrapper[5002]: Exiting ovsdb-server (5) [ OK ] Dec 03 16:56:01 crc kubenswrapper[5002]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 03 16:56:01 crc kubenswrapper[5002]: + source /usr/local/bin/container-scripts/functions Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNBridge=br-int Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNRemote=tcp:localhost:6642 Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNEncapType=geneve Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNAvailabilityZones= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ EnableChassisAsGateway=true Dec 03 16:56:01 crc kubenswrapper[5002]: ++ PhysicalNetworks= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNHostName= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 03 16:56:01 crc kubenswrapper[5002]: ++ ovs_dir=/var/lib/openvswitch Dec 03 16:56:01 crc kubenswrapper[5002]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 03 16:56:01 crc kubenswrapper[5002]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 03 16:56:01 crc kubenswrapper[5002]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + sleep 0.5 Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + sleep 0.5 Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + cleanup_ovsdb_server_semaphore Dec 03 16:56:01 crc kubenswrapper[5002]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 03 16:56:01 crc kubenswrapper[5002]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 03 16:56:01 crc kubenswrapper[5002]: > Dec 03 16:56:01 crc kubenswrapper[5002]: E1203 16:56:01.526317 5002 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 03 16:56:01 crc kubenswrapper[5002]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 03 16:56:01 crc kubenswrapper[5002]: + source /usr/local/bin/container-scripts/functions Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNBridge=br-int Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNRemote=tcp:localhost:6642 Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNEncapType=geneve Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNAvailabilityZones= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ EnableChassisAsGateway=true Dec 03 16:56:01 crc kubenswrapper[5002]: ++ PhysicalNetworks= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ OVNHostName= Dec 03 16:56:01 crc kubenswrapper[5002]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 03 16:56:01 crc kubenswrapper[5002]: ++ ovs_dir=/var/lib/openvswitch Dec 03 16:56:01 crc kubenswrapper[5002]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 03 16:56:01 crc kubenswrapper[5002]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 03 16:56:01 crc kubenswrapper[5002]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + sleep 0.5 Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + sleep 0.5 Dec 03 16:56:01 crc kubenswrapper[5002]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 03 16:56:01 crc kubenswrapper[5002]: + cleanup_ovsdb_server_semaphore Dec 03 16:56:01 crc kubenswrapper[5002]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 03 16:56:01 crc kubenswrapper[5002]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 03 16:56:01 crc kubenswrapper[5002]: > pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" containerID="cri-o://698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.526359 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" containerID="cri-o://698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" gracePeriod=29 Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.561364 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-combined-ca-bundle\") pod \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.561413 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config\") pod \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.561470 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config-secret\") pod \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.561558 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqtz9\" (UniqueName: \"kubernetes.io/projected/1823be31-afb8-4085-a9a1-f1b75c65f3a2-kube-api-access-qqtz9\") pod \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\" (UID: \"1823be31-afb8-4085-a9a1-f1b75c65f3a2\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562095 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562118 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t44c\" (UniqueName: \"kubernetes.io/projected/922c692b-3d5c-45df-862d-d4e08b06fe0b-kube-api-access-5t44c\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562133 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562143 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfcv5\" (UniqueName: \"kubernetes.io/projected/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-kube-api-access-bfcv5\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562154 5002 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562176 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.562189 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.582016 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1823be31-afb8-4085-a9a1-f1b75c65f3a2-kube-api-access-qqtz9" (OuterVolumeSpecName: "kube-api-access-qqtz9") pod "1823be31-afb8-4085-a9a1-f1b75c65f3a2" (UID: "1823be31-afb8-4085-a9a1-f1b75c65f3a2"). InnerVolumeSpecName "kube-api-access-qqtz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.611302 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-df99c8b7f-mljz8" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.166:8080/healthcheck\": dial tcp 10.217.0.166:8080: connect: connection refused" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.611973 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-df99c8b7f-mljz8" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.166:8080/healthcheck\": dial tcp 10.217.0.166:8080: connect: connection refused" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.662056 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qnvnc_9f59bead-66d7-4fcb-842f-e573fcadcf1f/openstack-network-exporter/0.log" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.662119 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qnvnc" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.663954 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqtz9\" (UniqueName: \"kubernetes.io/projected/1823be31-afb8-4085-a9a1-f1b75c65f3a2-kube-api-access-qqtz9\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.672884 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican8d7a-account-delete-fx9r7"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.692041 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-config-data" (OuterVolumeSpecName: "config-data") pod "922c692b-3d5c-45df-862d-d4e08b06fe0b" (UID: "922c692b-3d5c-45df-862d-d4e08b06fe0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.695586 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1823be31-afb8-4085-a9a1-f1b75c65f3a2" (UID: "1823be31-afb8-4085-a9a1-f1b75c65f3a2"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.698060 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronb96b-account-delete-ttg76"] Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.749962 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "922c692b-3d5c-45df-862d-d4e08b06fe0b" (UID: "922c692b-3d5c-45df-862d-d4e08b06fe0b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.759715 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766280 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovs-rundir\") pod \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766386 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-metrics-certs-tls-certs\") pod \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766395 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "9f59bead-66d7-4fcb-842f-e573fcadcf1f" (UID: "9f59bead-66d7-4fcb-842f-e573fcadcf1f"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766447 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovn-rundir\") pod \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766486 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-combined-ca-bundle\") pod \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766519 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "9f59bead-66d7-4fcb-842f-e573fcadcf1f" (UID: "9f59bead-66d7-4fcb-842f-e573fcadcf1f"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766553 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f59bead-66d7-4fcb-842f-e573fcadcf1f-config\") pod \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.766615 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56vvz\" (UniqueName: \"kubernetes.io/projected/9f59bead-66d7-4fcb-842f-e573fcadcf1f-kube-api-access-56vvz\") pod \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\" (UID: \"9f59bead-66d7-4fcb-842f-e573fcadcf1f\") " Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.767041 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.767053 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.767062 5002 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovs-rundir\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.767070 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.770249 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9f59bead-66d7-4fcb-842f-e573fcadcf1f-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.770262 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.767800 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f59bead-66d7-4fcb-842f-e573fcadcf1f-config" (OuterVolumeSpecName: "config") pod "9f59bead-66d7-4fcb-842f-e573fcadcf1f" (UID: "9f59bead-66d7-4fcb-842f-e573fcadcf1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.769947 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.793092 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f59bead-66d7-4fcb-842f-e573fcadcf1f-kube-api-access-56vvz" (OuterVolumeSpecName: "kube-api-access-56vvz") pod "9f59bead-66d7-4fcb-842f-e573fcadcf1f" (UID: "9f59bead-66d7-4fcb-842f-e573fcadcf1f"). InnerVolumeSpecName "kube-api-access-56vvz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.803937 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.873799 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f59bead-66d7-4fcb-842f-e573fcadcf1f-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.873829 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56vvz\" (UniqueName: \"kubernetes.io/projected/9f59bead-66d7-4fcb-842f-e573fcadcf1f-kube-api-access-56vvz\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.873841 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.873849 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: E1203 16:56:01.873905 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 03 16:56:01 crc kubenswrapper[5002]: E1203 16:56:01.873949 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data podName:382d6556-c45b-43dd-a4fa-16b3e91e0725 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:03.873934877 +0000 UTC m=+1487.287756765 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data") pod "rabbitmq-server-0" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725") : configmap "rabbitmq-config-data" not found Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.890691 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "2f69c54d-bd52-413b-86b6-6b5c4ca765ba" (UID: "2f69c54d-bd52-413b-86b6-6b5c4ca765ba"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.895501 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1823be31-afb8-4085-a9a1-f1b75c65f3a2" (UID: "1823be31-afb8-4085-a9a1-f1b75c65f3a2"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.946993 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "922c692b-3d5c-45df-862d-d4e08b06fe0b" (UID: "922c692b-3d5c-45df-862d-d4e08b06fe0b"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.976945 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.976990 5002 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.977033 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f69c54d-bd52-413b-86b6-6b5c4ca765ba-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.981347 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1823be31-afb8-4085-a9a1-f1b75c65f3a2" (UID: "1823be31-afb8-4085-a9a1-f1b75c65f3a2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:01 crc kubenswrapper[5002]: I1203 16:56:01.983168 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.002667 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "922c692b-3d5c-45df-862d-d4e08b06fe0b" (UID: "922c692b-3d5c-45df-862d-d4e08b06fe0b"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.045724 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder5ccf-account-delete-j49bd"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.072087 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f59bead-66d7-4fcb-842f-e573fcadcf1f" (UID: "9f59bead-66d7-4fcb-842f-e573fcadcf1f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.074292 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9f59bead-66d7-4fcb-842f-e573fcadcf1f" (UID: "9f59bead-66d7-4fcb-842f-e573fcadcf1f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.078682 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.078723 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.078735 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f59bead-66d7-4fcb-842f-e573fcadcf1f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.078765 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1823be31-afb8-4085-a9a1-f1b75c65f3a2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.078778 5002 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/922c692b-3d5c-45df-862d-d4e08b06fe0b-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.083247 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9eacefa0-a1f4-4181-ab8e-201efd0fc67e" (UID: "9eacefa0-a1f4-4181-ab8e-201efd0fc67e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.180788 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eacefa0-a1f4-4181-ab8e-201efd0fc67e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.219492 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.220551 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican8d7a-account-delete-fx9r7" event={"ID":"a9922091-0d6f-44cf-9b98-8b97a811ea26","Type":"ContainerStarted","Data":"aaeb745fd17c0c125b59345463fa15671df0259f3d508b558026629b9155ea38"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.221536 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi8fda-account-delete-h8c2d"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.232497 5002 generic.go:334] "Generic (PLEG): container finished" podID="30820296-8679-481c-9466-014d473e51ee" containerID="9d77ab2291660608c77e2f3623a3656df2f34d2c652d9964617af8f0a234719c" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.232520 5002 generic.go:334] "Generic (PLEG): container finished" podID="30820296-8679-481c-9466-014d473e51ee" containerID="e81a6b5b2d06af54cddc4be90e4d4a13ac27bedc2032aaec6cf75ef0057328dd" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.232586 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-df99c8b7f-mljz8" event={"ID":"30820296-8679-481c-9466-014d473e51ee","Type":"ContainerDied","Data":"9d77ab2291660608c77e2f3623a3656df2f34d2c652d9964617af8f0a234719c"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.232611 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-df99c8b7f-mljz8" event={"ID":"30820296-8679-481c-9466-014d473e51ee","Type":"ContainerDied","Data":"e81a6b5b2d06af54cddc4be90e4d4a13ac27bedc2032aaec6cf75ef0057328dd"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.232623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-df99c8b7f-mljz8" event={"ID":"30820296-8679-481c-9466-014d473e51ee","Type":"ContainerDied","Data":"c81a36efa37c21f6dc6f757cf32fc6a5e64fa7b0649c5acb9dc86bcc83764f0e"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.232633 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c81a36efa37c21f6dc6f757cf32fc6a5e64fa7b0649c5acb9dc86bcc83764f0e" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.253459 5002 generic.go:334] "Generic (PLEG): container finished" podID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerID="7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7" exitCode=143 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.253547 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" event={"ID":"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f","Type":"ContainerDied","Data":"7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.255468 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2f69c54d-bd52-413b-86b6-6b5c4ca765ba/ovsdbserver-nb/0.log" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.255574 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.255597 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2f69c54d-bd52-413b-86b6-6b5c4ca765ba","Type":"ContainerDied","Data":"fb81bfa6aa5abf78d199d7502ff93ca2c296eb1b7c137e553c06af9dc461679d"} Dec 03 16:56:02 crc kubenswrapper[5002]: W1203 16:56:02.255621 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33f61f67_5ef5_41a9_8bea_3335115b78e9.slice/crio-4af45ecb2aea8e64aa1dffce985c414bfe8d6d653169e479a7bc596c1f30b422 WatchSource:0}: Error finding container 4af45ecb2aea8e64aa1dffce985c414bfe8d6d653169e479a7bc596c1f30b422: Status 404 returned error can't find the container with id 4af45ecb2aea8e64aa1dffce985c414bfe8d6d653169e479a7bc596c1f30b422 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.255634 5002 scope.go:117] "RemoveContainer" containerID="9fb4d5ac070c404dafdfc058deb933ce0850d4c297f125e77eac745370a46ed9" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.267558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb96b-account-delete-ttg76" event={"ID":"13c5aef5-d2f5-4449-8cce-125cdf61d06b","Type":"ContainerStarted","Data":"3d8d5655e5d3b4c3dfc185dad2f1771c6d25c64a8ed411c48a49d9be4abde856"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.281201 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-svc\") pod \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.281289 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-swift-storage-0\") pod \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.281325 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-config\") pod \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.281382 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-nb\") pod \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.281461 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r57xj\" (UniqueName: \"kubernetes.io/projected/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-kube-api-access-r57xj\") pod \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.281532 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-sb\") pod \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\" (UID: \"34a0fbfb-baac-41c7-8430-cb0e1720dfa5\") " Dec 03 16:56:02 crc 
kubenswrapper[5002]: I1203 16:56:02.289987 5002 generic.go:334] "Generic (PLEG): container finished" podID="bc3c0d8b-823d-42bc-a114-766494075e59" containerID="5a4ccb5005877315a1ce91ddaced6dd35310247cd441e54f0d01164a98a72004" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.290080 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancea518-account-delete-tg6nl" event={"ID":"bc3c0d8b-823d-42bc-a114-766494075e59","Type":"ContainerDied","Data":"5a4ccb5005877315a1ce91ddaced6dd35310247cd441e54f0d01164a98a72004"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.290584 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-kube-api-access-r57xj" (OuterVolumeSpecName: "kube-api-access-r57xj") pod "34a0fbfb-baac-41c7-8430-cb0e1720dfa5" (UID: "34a0fbfb-baac-41c7-8430-cb0e1720dfa5"). InnerVolumeSpecName "kube-api-access-r57xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.328513 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qnvnc_9f59bead-66d7-4fcb-842f-e573fcadcf1f/openstack-network-exporter/0.log" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.328606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qnvnc" event={"ID":"9f59bead-66d7-4fcb-842f-e573fcadcf1f","Type":"ContainerDied","Data":"13d013a19c6910bc14e78d414164bc6ef0bce38644604d22080f59e3d5cc9aa5"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.328688 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qnvnc" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.343728 5002 generic.go:334] "Generic (PLEG): container finished" podID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerID="7319e26425e43fd3866431755e6e1859112eae7b40a15901924150e6da469063" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.343794 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68bfc56b4f-vnlr5" event={"ID":"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c","Type":"ContainerDied","Data":"7319e26425e43fd3866431755e6e1859112eae7b40a15901924150e6da469063"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383300 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="51ef41e140012493084ca9a5ee4771bb67457963ca1eb5c801a48e1b0525b81d" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383328 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="9d9bb1df438ed50da59ac696f69efde7c6a48d1828bcb3dd1620fd321d2b4d34" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383336 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="a869513d51677c3c369edfd440f7ae36fb809c1de1a7e02989a558f03d0af6af" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383344 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="fd82b64468e87bb52951423d662c96298a04577e50fb7dfae08cf95f6cb95f60" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383381 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"51ef41e140012493084ca9a5ee4771bb67457963ca1eb5c801a48e1b0525b81d"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"9d9bb1df438ed50da59ac696f69efde7c6a48d1828bcb3dd1620fd321d2b4d34"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383416 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"a869513d51677c3c369edfd440f7ae36fb809c1de1a7e02989a558f03d0af6af"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.383425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"fd82b64468e87bb52951423d662c96298a04577e50fb7dfae08cf95f6cb95f60"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.384609 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r57xj\" (UniqueName: \"kubernetes.io/projected/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-kube-api-access-r57xj\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.384665 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.384705 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data podName:e7a598f1-0f32-448c-b08f-b5b0e70f583d nodeName:}" failed. No retries permitted until 2025-12-03 16:56:06.384689741 +0000 UTC m=+1489.798511629 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data") pod "rabbitmq-cell1-server-0" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d") : configmap "rabbitmq-cell1-config-data" not found Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.395981 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.406217 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" event={"ID":"34a0fbfb-baac-41c7-8430-cb0e1720dfa5","Type":"ContainerDied","Data":"c694c2b070b49fa89d101386b041a7e045c135420e19fa507dfc5ef9e4a7de9e"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.406317 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9cbcb645-dw9nm" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.424468 5002 scope.go:117] "RemoveContainer" containerID="fea9cb37f60037b92107bb54b095e6f2b946d24fabe0764e7886f0b91713e11b" Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.453571 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.466107 5002 generic.go:334] "Generic (PLEG): container finished" podID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerID="5e6958690a01bcd45b235c0f78e4750c3aa535d1ce524a8ad09dfce393a6bba0" exitCode=143 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.466222 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4e408ec9-5662-43a6-93fd-9fa7a60f98db","Type":"ContainerDied","Data":"5e6958690a01bcd45b235c0f78e4750c3aa535d1ce524a8ad09dfce393a6bba0"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.467881 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "34a0fbfb-baac-41c7-8430-cb0e1720dfa5" (UID: "34a0fbfb-baac-41c7-8430-cb0e1720dfa5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.487800 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-combined-ca-bundle\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.487926 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-public-tls-certs\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.487958 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-etc-swift\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.487985 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-config-data\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.488007 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-internal-tls-certs\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.488121 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-log-httpd\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.488168 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27fbw\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-kube-api-access-27fbw\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.488187 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-run-httpd\") pod \"30820296-8679-481c-9466-014d473e51ee\" (UID: \"30820296-8679-481c-9466-014d473e51ee\") " Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.488283 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.488599 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.489337 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.489368 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.499055 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.503577 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.504165 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.504230 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" containerName="nova-cell1-conductor-conductor" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.508915 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9eacefa0-a1f4-4181-ab8e-201efd0fc67e/ovsdbserver-sb/0.log" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.508988 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9eacefa0-a1f4-4181-ab8e-201efd0fc67e","Type":"ContainerDied","Data":"93f6e107bbb3c4a0c3a783debd48c067ea7aa8d27f0862ea8122e14c6544a00a"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.509063 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.514338 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-qnvnc"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.516359 5002 generic.go:334] "Generic (PLEG): container finished" podID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerID="49cfe11824388fcd621bf5c8c3ab0301531171637a9eff8d8df877c7b70ebe69" exitCode=143 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.516425 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" event={"ID":"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9","Type":"ContainerDied","Data":"49cfe11824388fcd621bf5c8c3ab0301531171637a9eff8d8df877c7b70ebe69"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.519313 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-qnvnc"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.526442 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.526817 5002 generic.go:334] "Generic (PLEG): container finished" podID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" exitCode=0 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.526899 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerDied","Data":"698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.527122 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0be01-account-delete-r7zk7"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.532324 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-kube-api-access-27fbw" (OuterVolumeSpecName: "kube-api-access-27fbw") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "kube-api-access-27fbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.537221 5002 generic.go:334] "Generic (PLEG): container finished" podID="b576f304-fd7e-419e-937c-dafaf1c00970" containerID="b860ed590860a3a1a54cd9b7b0258210275e0aeba1d0db0458d40dced1fb66a7" exitCode=143 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.537291 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b576f304-fd7e-419e-937c-dafaf1c00970","Type":"ContainerDied","Data":"b860ed590860a3a1a54cd9b7b0258210275e0aeba1d0db0458d40dced1fb66a7"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.544145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placemente6cd-account-delete-wpdgt" event={"ID":"05d0c2be-6525-4ec1-bcae-e240255c970a","Type":"ContainerStarted","Data":"e2d41fb6210ddfb0fb39f3a4bf92de9fc6bad5a954c43dc2e9abead7a4532887"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.556721 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.563306 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.581467 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5ccf-account-delete-j49bd" event={"ID":"de753fc7-23ae-4680-9d4c-11e5632d749d","Type":"ContainerStarted","Data":"7493a23574d5d7c151518152bfe01d7eeb977c23a3f2b88081d2bf4a38f98d94"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.583705 5002 scope.go:117] "RemoveContainer" containerID="2f14606baed65302b953c10a81a0db167dba604320e45146e5757544b0bb9b0c" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.586246 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"922c692b-3d5c-45df-862d-d4e08b06fe0b","Type":"ContainerDied","Data":"15e428211cbb8116af4bedac9722e645f9afc19ec6e413fd937a91a0fcbadc81"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.586321 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.590672 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.590707 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27fbw\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-kube-api-access-27fbw\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.590717 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30820296-8679-481c-9466-014d473e51ee-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.590725 5002 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30820296-8679-481c-9466-014d473e51ee-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.591339 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.595174 5002 generic.go:334] "Generic (PLEG): container finished" podID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerID="7782f9e82bd0b16c1ea4af876571de089d43551370a7363494d32a492da6fdb9" exitCode=143 Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.595205 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b89c68cbb-nkz44" event={"ID":"55e94451-ebc7-4a6c-9927-df89ae0fc3c2","Type":"ContainerDied","Data":"7782f9e82bd0b16c1ea4af876571de089d43551370a7363494d32a492da6fdb9"} Dec 03 16:56:02 crc kubenswrapper[5002]: I1203 16:56:02.604333 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "34a0fbfb-baac-41c7-8430-cb0e1720dfa5" (UID: "34a0fbfb-baac-41c7-8430-cb0e1720dfa5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:02 crc kubenswrapper[5002]: E1203 16:56:02.647021 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.659659 5002 scope.go:117] "RemoveContainer" containerID="58c1385bcef3302471d6a081ef5d49065e260a3904a830edb4066b5c487279af" Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.659866 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.674277 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.678376 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.678447 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerName="nova-scheduler-scheduler" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.679823 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.692660 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.708589 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-config" (OuterVolumeSpecName: "config") pod "34a0fbfb-baac-41c7-8430-cb0e1720dfa5" (UID: "34a0fbfb-baac-41c7-8430-cb0e1720dfa5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.729986 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "34a0fbfb-baac-41c7-8430-cb0e1720dfa5" (UID: "34a0fbfb-baac-41c7-8430-cb0e1720dfa5"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.796388 5002 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.796428 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.876962 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1823be31-afb8-4085-a9a1-f1b75c65f3a2" path="/var/lib/kubelet/pods/1823be31-afb8-4085-a9a1-f1b75c65f3a2/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.878045 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" path="/var/lib/kubelet/pods/2f69c54d-bd52-413b-86b6-6b5c4ca765ba/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.878947 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="416c4441-853a-4b67-95a9-79fc893fa7be" path="/var/lib/kubelet/pods/416c4441-853a-4b67-95a9-79fc893fa7be/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.881759 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f384c1f-6c43-4273-9dd6-301c4aad47bd" path="/var/lib/kubelet/pods/5f384c1f-6c43-4273-9dd6-301c4aad47bd/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.882448 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88" path="/var/lib/kubelet/pods/7e60d1fa-1361-4dd2-bf4f-d17f6a49aa88/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.883695 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="922c692b-3d5c-45df-862d-d4e08b06fe0b" path="/var/lib/kubelet/pods/922c692b-3d5c-45df-862d-d4e08b06fe0b/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.885124 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" path="/var/lib/kubelet/pods/9eacefa0-a1f4-4181-ab8e-201efd0fc67e/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.886401 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f59bead-66d7-4fcb-842f-e573fcadcf1f" path="/var/lib/kubelet/pods/9f59bead-66d7-4fcb-842f-e573fcadcf1f/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.897353 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b405bac2-d301-4132-bd82-a1c7d0b0df6c" path="/var/lib/kubelet/pods/b405bac2-d301-4132-bd82-a1c7d0b0df6c/volumes" Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.907871 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.915894 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.931902 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:02.931973 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="ovn-northd" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:02.989159 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "34a0fbfb-baac-41c7-8430-cb0e1720dfa5" (UID: "34a0fbfb-baac-41c7-8430-cb0e1720dfa5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.003078 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34a0fbfb-baac-41c7-8430-cb0e1720dfa5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.124919 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.135832 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.138581 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.151016 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-config-data" (OuterVolumeSpecName: "config-data") pod "30820296-8679-481c-9466-014d473e51ee" (UID: "30820296-8679-481c-9466-014d473e51ee"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.207359 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.207395 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.207408 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.207420 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/30820296-8679-481c-9466-014d473e51ee-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.335303 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.163:8776/healthcheck\": read tcp 10.217.0.2:33820->10.217.0.163:8776: read: connection reset by peer" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.657844 5002 generic.go:334] "Generic (PLEG): container finished" podID="05d0c2be-6525-4ec1-bcae-e240255c970a" containerID="b31ca3192535ab4be61b9eb6074bc739b997b6f06d2c9f4e0e1cf060deb5622c" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.685781 5002 generic.go:334] "Generic (PLEG): container finished" podID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerID="d8f143f4c81641c282592b60722c3b70b5b045ac7605f798b8c288b1a27c879f" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.688461 5002 generic.go:334] "Generic (PLEG): container finished" podID="de753fc7-23ae-4680-9d4c-11e5632d749d" containerID="73c4d49306ef8821fe226706408e794a8c85e94b5167824f65c0f4a8a0ad2be4" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.690449 5002 generic.go:334] "Generic (PLEG): container finished" podID="13c5aef5-d2f5-4449-8cce-125cdf61d06b" containerID="9f8d3e7dd1232b8bed9118f1348182183eb70011fc8ae05a19a411172dade4b4" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.693934 5002 generic.go:334] "Generic (PLEG): container finished" podID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerID="2c7e91463c94f3cc50714dbf46be64e0f5e71d87a2dbb56ce0212f820db59c76" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:03.694664 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98 is running failed: container process not found" containerID="79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:03.694993 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98 is 
running failed: container process not found" containerID="79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:03.695299 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98 is running failed: container process not found" containerID="79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:03.695382 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98 is running failed: container process not found" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="galera" Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.700436 5002 generic.go:334] "Generic (PLEG): container finished" podID="1c099352-abbe-4c3a-9431-c854e5333420" containerID="02d099c7b5a52a32907ff960d5be05cd37532b1c4db658f445b99342fec6b927" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.701767 5002 generic.go:334] "Generic (PLEG): container finished" podID="33f61f67-5ef5-41a9-8bea-3335115b78e9" containerID="be947ce6345fc6664a5a4f6598b88db3c63f4191808ff50a2479b4e6f82fb2a5" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.734953 5002 generic.go:334] "Generic (PLEG): container finished" podID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" containerID="66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.748393 5002 generic.go:334] "Generic (PLEG): container finished" podID="a9922091-0d6f-44cf-9b98-8b97a811ea26" containerID="480ec2b533abe5221031de6e27a59835f1f69c022a454f99a2fbd94cdb9e4c76" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.791267 5002 generic.go:334] "Generic (PLEG): container finished" podID="b132eed4-cb4d-4abc-b49a-55688686288d" containerID="79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98" exitCode=0 Dec 03 16:56:03 crc kubenswrapper[5002]: I1203 16:56:03.798667 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-df99c8b7f-mljz8" Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:03.951051 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 03 16:56:03 crc kubenswrapper[5002]: E1203 16:56:03.951122 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data podName:382d6556-c45b-43dd-a4fa-16b3e91e0725 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:07.951104532 +0000 UTC m=+1491.364926430 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data") pod "rabbitmq-server-0" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725") : configmap "rabbitmq-config-data" not found Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.056414 5002 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.216s" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056448 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placemente6cd-account-delete-wpdgt" event={"ID":"05d0c2be-6525-4ec1-bcae-e240255c970a","Type":"ContainerDied","Data":"b31ca3192535ab4be61b9eb6074bc739b997b6f06d2c9f4e0e1cf060deb5622c"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056539 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056555 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929","Type":"ContainerDied","Data":"d8f143f4c81641c282592b60722c3b70b5b045ac7605f798b8c288b1a27c879f"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056568 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056578 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056588 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-76gsx"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056597 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-4kfbq"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056607 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-76gsx"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056620 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-4kfbq"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056629 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-78586996b8-nkxdr"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056639 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.057296 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-central-agent" containerID="cri-o://b8df45427a97ecede50bedecdafce707afb7a6c3b99bed2b3b06ade3f707f8c0" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.057453 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="proxy-httpd" containerID="cri-o://dfddf060fa4fddc35fda40088529fbf414441a192eea714876824df928864e7e" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.057505 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="sg-core" containerID="cri-o://739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721" gracePeriod=30 Dec 03 16:56:04 crc 
kubenswrapper[5002]: I1203 16:56:04.057547 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-notification-agent" containerID="cri-o://f170d2d5c5453f499381793538a1774fccdd3cc83fae41dd5c62f54865d69fea" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.057439 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="bea5a03b-519f-4af4-873a-e5e7f9e8f769" containerName="kube-state-metrics" containerID="cri-o://4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.056657 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5ccf-account-delete-j49bd" event={"ID":"de753fc7-23ae-4680-9d4c-11e5632d749d","Type":"ContainerDied","Data":"73c4d49306ef8821fe226706408e794a8c85e94b5167824f65c0f4a8a0ad2be4"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058058 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb96b-account-delete-ttg76" event={"ID":"13c5aef5-d2f5-4449-8cce-125cdf61d06b","Type":"ContainerDied","Data":"9f8d3e7dd1232b8bed9118f1348182183eb70011fc8ae05a19a411172dade4b4"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058071 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965b05ab-f8e9-485e-9f15-2160a598d8c2","Type":"ContainerDied","Data":"2c7e91463c94f3cc50714dbf46be64e0f5e71d87a2dbb56ce0212f820db59c76"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058110 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0be01-account-delete-r7zk7" event={"ID":"721437ce-17b0-434b-9604-197f795ed1d9","Type":"ContainerStarted","Data":"383af97d21335016a4ee42514ad7c02f5401fc06e2ef0ca38f199ba1e62faa46"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058122 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-bj88z"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058134 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-bj88z"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058147 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placemente6cd-account-delete-wpdgt"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058160 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e6cd-account-create-update-v4z66"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058171 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e6cd-account-create-update-v4z66"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058181 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6495d47864-tf6dm" event={"ID":"1c099352-abbe-4c3a-9431-c854e5333420","Type":"ContainerDied","Data":"02d099c7b5a52a32907ff960d5be05cd37532b1c4db658f445b99342fec6b927"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058193 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi8fda-account-delete-h8c2d" event={"ID":"33f61f67-5ef5-41a9-8bea-3335115b78e9","Type":"ContainerDied","Data":"be947ce6345fc6664a5a4f6598b88db3c63f4191808ff50a2479b4e6f82fb2a5"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058203 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/keystone-db-create-gf624"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058213 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-gf624"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058222 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi8fda-account-delete-h8c2d" event={"ID":"33f61f67-5ef5-41a9-8bea-3335115b78e9","Type":"ContainerStarted","Data":"4af45ecb2aea8e64aa1dffce985c414bfe8d6d653169e479a7bc596c1f30b422"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058236 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-864c-account-create-update-f9jhh"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058246 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-864c-account-create-update-f9jhh"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058255 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-x6tvw"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058264 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5a3af9fa-d550-4d97-8d54-b198f0ca6f31","Type":"ContainerDied","Data":"66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058274 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican8d7a-account-delete-fx9r7" event={"ID":"a9922091-0d6f-44cf-9b98-8b97a811ea26","Type":"ContainerDied","Data":"480ec2b533abe5221031de6e27a59835f1f69c022a454f99a2fbd94cdb9e4c76"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058286 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-x6tvw"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058298 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b132eed4-cb4d-4abc-b49a-55688686288d","Type":"ContainerDied","Data":"79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058307 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b132eed4-cb4d-4abc-b49a-55688686288d","Type":"ContainerDied","Data":"952dc21f12f66b37c4abc6cfb2a7cbd91ac9fe4cf84948f1a14010e80be6ae18"} Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.058317 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="952dc21f12f66b37c4abc6cfb2a7cbd91ac9fe4cf84948f1a14010e80be6ae18" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.063074 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-78586996b8-nkxdr" podUID="dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" containerName="keystone-api" containerID="cri-o://8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.067867 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="852cfff4-0855-40ab-a82d-b560c37118bf" containerName="memcached" containerID="cri-o://2ba9a566a71354e8ef6bda7b9341c58e4f1b4fb5649b671ff2b3c91f0cb5d63d" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.091083 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.091182 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb96b-account-delete-ttg76"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.112156 5002 scope.go:117] "RemoveContainer" containerID="5dd60355fb5a4a6102dcb9b1dd2b0adb310b8248a9479047879e2a7f22a1d4bb" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.129229 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b89c68cbb-nkz44" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:60222->10.217.0.162:9311: read: connection reset by peer" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.129547 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b89c68cbb-nkz44" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:60232->10.217.0.162:9311: read: connection reset by peer" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.152258 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b96b-account-create-update-bnk5v"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.160655 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-galera-tls-certs\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.160725 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.160853 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-generated\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.160920 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvc54\" (UniqueName: \"kubernetes.io/projected/b132eed4-cb4d-4abc-b49a-55688686288d-kube-api-access-nvc54\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.161005 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-kolla-config\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.161036 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-default\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: 
I1203 16:56:04.161115 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-combined-ca-bundle\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.161146 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-operator-scripts\") pod \"b132eed4-cb4d-4abc-b49a-55688686288d\" (UID: \"b132eed4-cb4d-4abc-b49a-55688686288d\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.179107 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.181568 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.182710 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.183191 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.183498 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b96b-account-create-update-bnk5v"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.225529 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.240287 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.240442 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.240550 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.247295 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.247322 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.247510 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.247922 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" 
containerName="nova-cell0-conductor-conductor" Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.250009 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.259167 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.270370 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.270622 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.270683 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.295230 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" probeResult="failure" output=< Dec 03 16:56:04 crc kubenswrapper[5002]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Dec 03 16:56:04 crc kubenswrapper[5002]: > Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.299078 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.315336 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.315385 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.315404 5002 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.315417 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.315434 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b132eed4-cb4d-4abc-b49a-55688686288d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.324128 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.334019 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b132eed4-cb4d-4abc-b49a-55688686288d-kube-api-access-nvc54" (OuterVolumeSpecName: "kube-api-access-nvc54") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "kube-api-access-nvc54". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.367713 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.418804 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t5sh\" (UniqueName: \"kubernetes.io/projected/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-kube-api-access-2t5sh\") pod \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419133 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-logs\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419173 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jslt\" (UniqueName: \"kubernetes.io/projected/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-kube-api-access-8jslt\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419198 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-scripts\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419222 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419285 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-httpd-run\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419307 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-combined-ca-bundle\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419353 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data-custom\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419375 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-internal-tls-certs\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419401 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965b05ab-f8e9-485e-9f15-2160a598d8c2-logs\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc 
kubenswrapper[5002]: I1203 16:56:04.419433 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-config-data\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419466 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965b05ab-f8e9-485e-9f15-2160a598d8c2-etc-machine-id\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419494 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlb5c\" (UniqueName: \"kubernetes.io/projected/965b05ab-f8e9-485e-9f15-2160a598d8c2-kube-api-access-jlb5c\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419518 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-combined-ca-bundle\") pod \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419538 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419566 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-internal-tls-certs\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419592 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-config-data\") pod \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\" (UID: \"5a3af9fa-d550-4d97-8d54-b198f0ca6f31\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419616 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-public-tls-certs\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419638 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-scripts\") pod \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\" (UID: \"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.419659 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-combined-ca-bundle\") pod \"965b05ab-f8e9-485e-9f15-2160a598d8c2\" (UID: \"965b05ab-f8e9-485e-9f15-2160a598d8c2\") " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 
16:56:04.420341 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvc54\" (UniqueName: \"kubernetes.io/projected/b132eed4-cb4d-4abc-b49a-55688686288d-kube-api-access-nvc54\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.425335 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/965b05ab-f8e9-485e-9f15-2160a598d8c2-logs" (OuterVolumeSpecName: "logs") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.426247 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/965b05ab-f8e9-485e-9f15-2160a598d8c2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.427170 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.446385 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-logs" (OuterVolumeSpecName: "logs") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.446654 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-kube-api-access-2t5sh" (OuterVolumeSpecName: "kube-api-access-2t5sh") pod "5a3af9fa-d550-4d97-8d54-b198f0ca6f31" (UID: "5a3af9fa-d550-4d97-8d54-b198f0ca6f31"). InnerVolumeSpecName "kube-api-access-2t5sh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.453784 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.459986 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.461958 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-kube-api-access-8jslt" (OuterVolumeSpecName: "kube-api-access-8jslt") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "kube-api-access-8jslt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.462103 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.462191 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/965b05ab-f8e9-485e-9f15-2160a598d8c2-kube-api-access-jlb5c" (OuterVolumeSpecName: "kube-api-access-jlb5c") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "kube-api-access-jlb5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.462337 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-scripts" (OuterVolumeSpecName: "scripts") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.468364 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.468402 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-scripts" (OuterVolumeSpecName: "scripts") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.477121 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6vv7m"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524123 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524297 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t5sh\" (UniqueName: \"kubernetes.io/projected/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-kube-api-access-2t5sh\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524386 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524439 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524493 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jslt\" (UniqueName: \"kubernetes.io/projected/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-kube-api-access-8jslt\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524541 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524694 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524761 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524814 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524861 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965b05ab-f8e9-485e-9f15-2160a598d8c2-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524907 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965b05ab-f8e9-485e-9f15-2160a598d8c2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.524986 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlb5c\" (UniqueName: \"kubernetes.io/projected/965b05ab-f8e9-485e-9f15-2160a598d8c2-kube-api-access-jlb5c\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.525102 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.553380 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6vv7m"] Dec 03 16:56:04 crc kubenswrapper[5002]: E1203 16:56:04.570059 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbea5a03b_519f_4af4_873a_e5e7f9e8f769.slice/crio-4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55e94451_ebc7_4a6c_9927_df89ae0fc3c2.slice/crio-2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb576f304_fd7e_419e_937c_dafaf1c00970.slice/crio-a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb576f304_fd7e_419e_937c_dafaf1c00970.slice/crio-conmon-a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35ed8f9_f4d0_4987_9d3d_83b1b04d9b6a.slice/crio-conmon-739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34a0fbfb_baac_41c7_8430_cb0e1720dfa5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35ed8f9_f4d0_4987_9d3d_83b1b04d9b6a.slice/crio-dfddf060fa4fddc35fda40088529fbf414441a192eea714876824df928864e7e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35ed8f9_f4d0_4987_9d3d_83b1b04d9b6a.slice/crio-739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34a0fbfb_baac_41c7_8430_cb0e1720dfa5.slice/crio-c694c2b070b49fa89d101386b041a7e045c135420e19fa507dfc5ef9e4a7de9e\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbea5a03b_519f_4af4_873a_e5e7f9e8f769.slice/crio-conmon-4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55e94451_ebc7_4a6c_9927_df89ae0fc3c2.slice/crio-conmon-2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.570196 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8d7a-account-create-update-dnhkq"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.570546 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican8d7a-account-delete-fx9r7"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.580582 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/barbican-8d7a-account-create-update-dnhkq"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.594920 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9cbcb645-dw9nm"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.607022 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9cbcb645-dw9nm"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.614900 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-df99c8b7f-mljz8"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.627937 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-df99c8b7f-mljz8"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.656600 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerName="galera" containerID="cri-o://76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236" gracePeriod=30 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.662050 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-nnvht"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.681195 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-nnvht"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.688957 5002 scope.go:117] "RemoveContainer" containerID="19fc75cf1b1a687269a530436d2398a6475ba55eef4ea65b1527d796cfd116c4" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.702234 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder5ccf-account-delete-j49bd"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.709020 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5ccf-account-create-update-t42bt"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.721459 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5ccf-account-create-update-t42bt"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.724833 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.732428 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.735918 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-p8w5l"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.751160 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-p8w5l"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.778796 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi8fda-account-delete-h8c2d"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.785198 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8fda-account-create-update-2mg6t"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.816964 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8fda-account-create-update-2mg6t"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.827584 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-ppdv5"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.835167 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-ppdv5"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.849858 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0be01-account-delete-r7zk7"] Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.874521 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.892196 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.892196 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a3af9fa-d550-4d97-8d54-b198f0ca6f31" (UID: "5a3af9fa-d550-4d97-8d54-b198f0ca6f31"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.893350 5002 generic.go:334] "Generic (PLEG): container finished" podID="b576f304-fd7e-419e-937c-dafaf1c00970" containerID="a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2" exitCode=0 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.897971 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c540d5c-7b6e-4fef-8aa9-bce2a4d75111" path="/var/lib/kubelet/pods/0c540d5c-7b6e-4fef-8aa9-bce2a4d75111/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.898662 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30820296-8679-481c-9466-014d473e51ee" path="/var/lib/kubelet/pods/30820296-8679-481c-9466-014d473e51ee/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.899345 5002 generic.go:334] "Generic (PLEG): container finished" podID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerID="2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81" exitCode=0 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.902401 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" path="/var/lib/kubelet/pods/34a0fbfb-baac-41c7-8430-cb0e1720dfa5/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.903930 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "b132eed4-cb4d-4abc-b49a-55688686288d" (UID: "b132eed4-cb4d-4abc-b49a-55688686288d"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.903951 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35fdb887-bef5-460d-af35-20096f2b736c" path="/var/lib/kubelet/pods/35fdb887-bef5-460d-af35-20096f2b736c/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.904768 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e39aa75-8f7f-4fd2-864b-b0ba10f955ca" path="/var/lib/kubelet/pods/5e39aa75-8f7f-4fd2-864b-b0ba10f955ca/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.909377 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.911033 5002 generic.go:334] "Generic (PLEG): container finished" podID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerID="a31812f2b35f2e5582ee415727310457b192635bb15360c9e32d188a23358eea" exitCode=0 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.931636 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63afe065-7146-41e8-b5d9-898d9492f88e" path="/var/lib/kubelet/pods/63afe065-7146-41e8-b5d9-898d9492f88e/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.932188 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="684116e4-56a2-4ac4-b802-d342d50db63f" path="/var/lib/kubelet/pods/684116e4-56a2-4ac4-b802-d342d50db63f/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.932722 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dde11f9-1724-4644-bba1-ab2e8efdee35" path="/var/lib/kubelet/pods/6dde11f9-1724-4644-bba1-ab2e8efdee35/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.935059 5002 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell0be01-account-delete-r7zk7" secret="" err="secret \"galera-openstack-dockercfg-4ch84\" not found" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.940149 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.940187 5002 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b132eed4-cb4d-4abc-b49a-55688686288d-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.940202 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.940216 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.940227 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.943032 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87c14106-47c6-4086-b10c-ca427875f3f0" path="/var/lib/kubelet/pods/87c14106-47c6-4086-b10c-ca427875f3f0/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.951279 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-config-data" (OuterVolumeSpecName: "config-data") pod "5a3af9fa-d550-4d97-8d54-b198f0ca6f31" (UID: "5a3af9fa-d550-4d97-8d54-b198f0ca6f31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.951394 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.955390 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell0be01-account-delete-r7zk7" podStartSLOduration=5.95537128 podStartE2EDuration="5.95537128s" podCreationTimestamp="2025-12-03 16:55:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:56:04.928442088 +0000 UTC m=+1488.342263986" watchObservedRunningTime="2025-12-03 16:56:04.95537128 +0000 UTC m=+1488.369193168" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.960338 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0cdc857-0262-4cb5-b4a9-0c28ea317db9" path="/var/lib/kubelet/pods/a0cdc857-0262-4cb5-b4a9-0c28ea317db9/volumes" Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.981725 5002 generic.go:334] "Generic (PLEG): container finished" podID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerID="344937d693aca613f5d6c7658a05a5e864b67f0cdbdd5fe03d3655235754d316" exitCode=0 Dec 03 16:56:04 crc kubenswrapper[5002]: I1203 16:56:04.985824 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data" (OuterVolumeSpecName: "config-data") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.019841 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b21b644c-09a6-4080-ba90-e2c8eb798535" path="/var/lib/kubelet/pods/b21b644c-09a6-4080-ba90-e2c8eb798535/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.022980 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b47a6833-493a-4bcf-9287-617fd78f4c40" path="/var/lib/kubelet/pods/b47a6833-493a-4bcf-9287-617fd78f4c40/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.023888 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd7b9f97-5f8f-4a6c-9151-6ec342c79d14" path="/var/lib/kubelet/pods/bd7b9f97-5f8f-4a6c-9151-6ec342c79d14/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.024448 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c45df5c7-76f5-466f-9393-7815176634b6" path="/var/lib/kubelet/pods/c45df5c7-76f5-466f-9393-7815176634b6/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.024972 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e39fcd9c-ea0f-4426-b6ec-97058995e105" path="/var/lib/kubelet/pods/e39fcd9c-ea0f-4426-b6ec-97058995e105/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.026224 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa754386-78cd-4b7a-8e5d-ba61e4f1d03f" path="/var/lib/kubelet/pods/fa754386-78cd-4b7a-8e5d-ba61e4f1d03f/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.026768 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbdf0c1c-4684-45ae-aedb-d91817e50576" path="/var/lib/kubelet/pods/fbdf0c1c-4684-45ae-aedb-d91817e50576/volumes" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.041655 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a3af9fa-d550-4d97-8d54-b198f0ca6f31-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.041677 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.041689 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: E1203 16:56:05.041764 5002 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 03 16:56:05 crc kubenswrapper[5002]: E1203 16:56:05.041809 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts podName:721437ce-17b0-434b-9604-197f795ed1d9 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:05.541794321 +0000 UTC m=+1488.955616209 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts") pod "novacell0be01-account-delete-r7zk7" (UID: "721437ce-17b0-434b-9604-197f795ed1d9") : configmap "openstack-scripts" not found Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.052495 5002 generic.go:334] "Generic (PLEG): container finished" podID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerID="dfddf060fa4fddc35fda40088529fbf414441a192eea714876824df928864e7e" exitCode=0 Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.052535 5002 generic.go:334] "Generic (PLEG): container finished" podID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerID="739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721" exitCode=2 Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.052545 5002 generic.go:334] "Generic (PLEG): container finished" podID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerID="f170d2d5c5453f499381793538a1774fccdd3cc83fae41dd5c62f54865d69fea" exitCode=0 Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.052553 5002 generic.go:334] "Generic (PLEG): container finished" podID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerID="b8df45427a97ecede50bedecdafce707afb7a6c3b99bed2b3b06ade3f707f8c0" exitCode=0 Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.062542 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.070452 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "965b05ab-f8e9-485e-9f15-2160a598d8c2" (UID: "965b05ab-f8e9-485e-9f15-2160a598d8c2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.075131 5002 generic.go:334] "Generic (PLEG): container finished" podID="bea5a03b-519f-4af4-873a-e5e7f9e8f769" containerID="4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855" exitCode=2 Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.082124 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.094396 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.095064 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.132971 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-config-data" (OuterVolumeSpecName: "config-data") pod "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" (UID: "ec3d3d6b-b1df-4d20-8fc7-ca518ea56929"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.152242 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965b05ab-f8e9-485e-9f15-2160a598d8c2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.152278 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404148 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b576f304-fd7e-419e-937c-dafaf1c00970","Type":"ContainerDied","Data":"a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404202 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-be01-account-create-update-lfstp"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404224 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b89c68cbb-nkz44" event={"ID":"55e94451-ebc7-4a6c-9927-df89ae0fc3c2","Type":"ContainerDied","Data":"2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6495d47864-tf6dm" event={"ID":"1c099352-abbe-4c3a-9431-c854e5333420","Type":"ContainerDied","Data":"8714948cd07a1407e7d4a347cc13d0e2b6ac9298baf5382e7f3426c0692d9d66"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404253 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8714948cd07a1407e7d4a347cc13d0e2b6ac9298baf5382e7f3426c0692d9d66" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404269 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-be01-account-create-update-lfstp"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404295 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4e408ec9-5662-43a6-93fd-9fa7a60f98db","Type":"ContainerDied","Data":"a31812f2b35f2e5582ee415727310457b192635bb15360c9e32d188a23358eea"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404310 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0be01-account-delete-r7zk7" event={"ID":"721437ce-17b0-434b-9604-197f795ed1d9","Type":"ContainerStarted","Data":"e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404324 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d105ad9d-fbca-4a0c-b188-a88a363756c2","Type":"ContainerDied","Data":"344937d693aca613f5d6c7658a05a5e864b67f0cdbdd5fe03d3655235754d316"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404339 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerDied","Data":"dfddf060fa4fddc35fda40088529fbf414441a192eea714876824df928864e7e"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404355 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerDied","Data":"739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404365 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerDied","Data":"f170d2d5c5453f499381793538a1774fccdd3cc83fae41dd5c62f54865d69fea"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404379 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerDied","Data":"b8df45427a97ecede50bedecdafce707afb7a6c3b99bed2b3b06ade3f707f8c0"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404391 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929","Type":"ContainerDied","Data":"095e1c781a31fb7b65d3f6dfb5bfed87a58fb3e7589277c53b4f7f4bc4ffce5f"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404405 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancea518-account-delete-tg6nl" event={"ID":"bc3c0d8b-823d-42bc-a114-766494075e59","Type":"ContainerDied","Data":"dd0d02cf94b6e3f87153b6254c77ead3fc37774780df2141f856c67f4f13fec2"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404417 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd0d02cf94b6e3f87153b6254c77ead3fc37774780df2141f856c67f4f13fec2" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404427 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bea5a03b-519f-4af4-873a-e5e7f9e8f769","Type":"ContainerDied","Data":"4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404441 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5a3af9fa-d550-4d97-8d54-b198f0ca6f31","Type":"ContainerDied","Data":"c97b9d8430bb2816df2d936d883512f1eeee34e66c6ab2013595c183883c634a"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.404455 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965b05ab-f8e9-485e-9f15-2160a598d8c2","Type":"ContainerDied","Data":"4a1b82ddb6e16ce60bc6498d7b7a32be41d92d2e455b72a8607ee043219a55d3"} Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.452572 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.453599 5002 scope.go:117] "RemoveContainer" containerID="104487232d5020db87327b0c923d164a5647072cdec65c4bd2d6deffa890a181" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.455850 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.482012 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.498088 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.514598 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.527893 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.535060 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.552253 5002 scope.go:117] "RemoveContainer" containerID="b7977fd2fc849375b5f24a1f92f0acd219b3d1bff05f0b2a40d3322e005c9013" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.562801 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.563719 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.575821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-config-data\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.575869 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c099352-abbe-4c3a-9431-c854e5333420-logs\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.575892 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-scripts\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.575927 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-combined-ca-bundle\") pod \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.575975 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-certs\") pod \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576010 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-public-tls-certs\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " 
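The unmount traffic running through the entries above follows the volume reconciler's two-phase teardown: each pod's reference to a volume is released first ("operationExecutor.UnmountVolume started" followed by "UnmountVolume.TearDown succeeded" and "Volume detached"), and only once no pod still references a device-backed volume such as local-storage06-crc does the reconciler run the device-level step ("operationExecutor.UnmountDevice started" / "UnmountDevice succeeded"). A minimal Go sketch of that ordering follows; every name in it is invented for illustration and none of it is kubelet's actual API.

package main

import "fmt"

// volume models one device-backed volume and the pod UIDs still mounting it.
type volume struct {
	name string
	pods map[string]bool
}

// unmountForPod is the per-pod step: tear down the pod's mount, drop the
// reference, and run the device-level unmount only when the last reference
// is gone -- mirroring the ordering of the log entries above.
func (v *volume) unmountForPod(uid string) {
	fmt.Printf("UnmountVolume.TearDown succeeded for %q (pod %s)\n", v.name, uid)
	delete(v.pods, uid)
	if len(v.pods) == 0 {
		fmt.Printf("UnmountDevice succeeded for %q\n", v.name)
	}
}

func main() {
	glance := &volume{
		name: "local-storage06-crc",
		pods: map[string]bool{"ec3d3d6b-b1df-4d20-8fc7-ca518ea56929": true},
	}
	glance.unmountForPod("ec3d3d6b-b1df-4d20-8fc7-ca518ea56929")
}

Keeping the per-pod and per-device steps separate is what allows the reconciler to interleave teardown for many pods, as it does above, without unmounting a device out from under a pod that still uses it.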
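The repeated MountVolume.SetUp failure for novacell0be01-account-delete-r7zk7 also shows the operation executor's exponential backoff: after configmap "openstack-scripts" is not found, the first retry is deferred by durationBeforeRetry 500ms (16:56:05.041 -> no retries until 16:56:05.541), and the next by 1s (16:56:05.613 -> 16:56:06.613), doubling on each failure up to a cap. Below is a runnable sketch of that retry loop under stated assumptions: only the 500ms start and the doubling are taken from the log; the 2m2s cap, the mountOperatorScripts helper, and the timing of the ConfigMap's reappearance are illustrative.

package main

import (
	"errors"
	"fmt"
	"time"
)

// available stands in for the moment an operator recreates the ConfigMap;
// purely a stand-in so the example terminates.
var available = time.Now().Add(1200 * time.Millisecond)

// mountOperatorScripts fails until the ConfigMap "exists", mirroring:
// MountVolume.SetUp failed ... configmap "openstack-scripts" not found.
func mountOperatorScripts() error {
	if time.Now().Before(available) {
		return errors.New(`configmap "openstack-scripts" not found`)
	}
	return nil
}

func main() {
	const (
		initialBackoff = 500 * time.Millisecond        // first durationBeforeRetry in the log
		maxBackoff     = 2*time.Minute + 2*time.Second // cap is an assumption, not from the log
	)

	backoff := initialBackoff
	for {
		if err := mountOperatorScripts(); err != nil {
			fmt.Printf("no retries permitted for %v: %v\n", backoff, err)
			time.Sleep(backoff)
			// Double the delay on each failure, clamped at the cap.
			if backoff *= 2; backoff > maxBackoff {
				backoff = maxBackoff
			}
			continue
		}
		fmt.Println("MountVolume.SetUp succeeded")
		return
	}
}

The per-volume backoff is why the same error reappears below at 16:56:05.613 with durationBeforeRetry 1s rather than being retried immediately.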
Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576028 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wmbn\" (UniqueName: \"kubernetes.io/projected/1c099352-abbe-4c3a-9431-c854e5333420-kube-api-access-8wmbn\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576051 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpwdm\" (UniqueName: \"kubernetes.io/projected/bc3c0d8b-823d-42bc-a114-766494075e59-kube-api-access-qpwdm\") pod \"bc3c0d8b-823d-42bc-a114-766494075e59\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576077 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-internal-tls-certs\") pod \"b576f304-fd7e-419e-937c-dafaf1c00970\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576100 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-logs\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576116 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b576f304-fd7e-419e-937c-dafaf1c00970-logs\") pod \"b576f304-fd7e-419e-937c-dafaf1c00970\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576146 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-config-data\") pod \"b576f304-fd7e-419e-937c-dafaf1c00970\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576170 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc3c0d8b-823d-42bc-a114-766494075e59-operator-scripts\") pod \"bc3c0d8b-823d-42bc-a114-766494075e59\" (UID: \"bc3c0d8b-823d-42bc-a114-766494075e59\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576204 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-combined-ca-bundle\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.576890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c099352-abbe-4c3a-9431-c854e5333420-logs" (OuterVolumeSpecName: "logs") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577120 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpsvc\" (UniqueName: \"kubernetes.io/projected/d105ad9d-fbca-4a0c-b188-a88a363756c2-kube-api-access-wpsvc\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577163 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-internal-tls-certs\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577184 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-combined-ca-bundle\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577233 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: \"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577250 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nxsv\" (UniqueName: \"kubernetes.io/projected/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-api-access-2nxsv\") pod \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577506 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-combined-ca-bundle\") pod \"b576f304-fd7e-419e-937c-dafaf1c00970\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.577542 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-scripts\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.582003 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-config-data\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.582081 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbv2k\" (UniqueName: \"kubernetes.io/projected/b576f304-fd7e-419e-937c-dafaf1c00970-kube-api-access-xbv2k\") pod \"b576f304-fd7e-419e-937c-dafaf1c00970\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.582120 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-httpd-run\") pod \"d105ad9d-fbca-4a0c-b188-a88a363756c2\" (UID: 
\"d105ad9d-fbca-4a0c-b188-a88a363756c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.582142 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-public-tls-certs\") pod \"b576f304-fd7e-419e-937c-dafaf1c00970\" (UID: \"b576f304-fd7e-419e-937c-dafaf1c00970\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.582177 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-public-tls-certs\") pod \"1c099352-abbe-4c3a-9431-c854e5333420\" (UID: \"1c099352-abbe-4c3a-9431-c854e5333420\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.582207 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-config\") pod \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\" (UID: \"bea5a03b-519f-4af4-873a-e5e7f9e8f769\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.584637 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d105ad9d-fbca-4a0c-b188-a88a363756c2-kube-api-access-wpsvc" (OuterVolumeSpecName: "kube-api-access-wpsvc") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "kube-api-access-wpsvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.592395 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-logs" (OuterVolumeSpecName: "logs") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.593677 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b576f304-fd7e-419e-937c-dafaf1c00970-logs" (OuterVolumeSpecName: "logs") pod "b576f304-fd7e-419e-937c-dafaf1c00970" (UID: "b576f304-fd7e-419e-937c-dafaf1c00970"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.594823 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-api-access-2nxsv" (OuterVolumeSpecName: "kube-api-access-2nxsv") pod "bea5a03b-519f-4af4-873a-e5e7f9e8f769" (UID: "bea5a03b-519f-4af4-873a-e5e7f9e8f769"). InnerVolumeSpecName "kube-api-access-2nxsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.595603 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc3c0d8b-823d-42bc-a114-766494075e59-kube-api-access-qpwdm" (OuterVolumeSpecName: "kube-api-access-qpwdm") pod "bc3c0d8b-823d-42bc-a114-766494075e59" (UID: "bc3c0d8b-823d-42bc-a114-766494075e59"). InnerVolumeSpecName "kube-api-access-qpwdm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.595976 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc3c0d8b-823d-42bc-a114-766494075e59-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bc3c0d8b-823d-42bc-a114-766494075e59" (UID: "bc3c0d8b-823d-42bc-a114-766494075e59"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.598402 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.603426 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.604757 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-scripts" (OuterVolumeSpecName: "scripts") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.606436 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.606703 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.606788 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b576f304-fd7e-419e-937c-dafaf1c00970-kube-api-access-xbv2k" (OuterVolumeSpecName: "kube-api-access-xbv2k") pod "b576f304-fd7e-419e-937c-dafaf1c00970" (UID: "b576f304-fd7e-419e-937c-dafaf1c00970"). InnerVolumeSpecName "kube-api-access-xbv2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.607433 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-scripts" (OuterVolumeSpecName: "scripts") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.607668 5002 scope.go:117] "RemoveContainer" containerID="d908e5d007ac194ec3ebbceec4f006b453c1be2fbf1833110a61991b4704e296" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.609671 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c099352-abbe-4c3a-9431-c854e5333420-kube-api-access-8wmbn" (OuterVolumeSpecName: "kube-api-access-8wmbn") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). 
InnerVolumeSpecName "kube-api-access-8wmbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.610553 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:56:05 crc kubenswrapper[5002]: E1203 16:56:05.612822 5002 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 03 16:56:05 crc kubenswrapper[5002]: E1203 16:56:05.613055 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts podName:721437ce-17b0-434b-9604-197f795ed1d9 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:06.61302605 +0000 UTC m=+1490.026847928 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts") pod "novacell0be01-account-delete-r7zk7" (UID: "721437ce-17b0-434b-9604-197f795ed1d9") : configmap "openstack-scripts" not found Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613516 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613529 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c099352-abbe-4c3a-9431-c854e5333420-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613537 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613545 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wmbn\" (UniqueName: \"kubernetes.io/projected/1c099352-abbe-4c3a-9431-c854e5333420-kube-api-access-8wmbn\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613556 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpwdm\" (UniqueName: \"kubernetes.io/projected/bc3c0d8b-823d-42bc-a114-766494075e59-kube-api-access-qpwdm\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613565 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d105ad9d-fbca-4a0c-b188-a88a363756c2-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613575 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b576f304-fd7e-419e-937c-dafaf1c00970-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613583 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc3c0d8b-823d-42bc-a114-766494075e59-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613591 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpsvc\" (UniqueName: \"kubernetes.io/projected/d105ad9d-fbca-4a0c-b188-a88a363756c2-kube-api-access-wpsvc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613610 
5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613620 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nxsv\" (UniqueName: \"kubernetes.io/projected/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-api-access-2nxsv\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613628 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.613637 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbv2k\" (UniqueName: \"kubernetes.io/projected/b576f304-fd7e-419e-937c-dafaf1c00970-kube-api-access-xbv2k\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.639932 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.642324 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.648886 5002 scope.go:117] "RemoveContainer" containerID="d8f143f4c81641c282592b60722c3b70b5b045ac7605f798b8c288b1a27c879f" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.652638 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.661783 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.672331 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.714891 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.714959 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-nova-metadata-tls-certs\") pod \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.714985 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-public-tls-certs\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715030 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data-custom\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715074 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-config-data\") pod \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715156 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e408ec9-5662-43a6-93fd-9fa7a60f98db-logs\") pod \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715230 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-896gt\" (UniqueName: \"kubernetes.io/projected/4e408ec9-5662-43a6-93fd-9fa7a60f98db-kube-api-access-896gt\") pod \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715290 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wrgn\" (UniqueName: \"kubernetes.io/projected/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-kube-api-access-8wrgn\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715359 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-internal-tls-certs\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715396 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-logs\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715419 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-combined-ca-bundle\") pod \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\" (UID: \"55e94451-ebc7-4a6c-9927-df89ae0fc3c2\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715483 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-combined-ca-bundle\") pod \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\" (UID: \"4e408ec9-5662-43a6-93fd-9fa7a60f98db\") " Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715977 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.715998 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath 
\"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.719086 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e408ec9-5662-43a6-93fd-9fa7a60f98db-logs" (OuterVolumeSpecName: "logs") pod "4e408ec9-5662-43a6-93fd-9fa7a60f98db" (UID: "4e408ec9-5662-43a6-93fd-9fa7a60f98db"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.719553 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-logs" (OuterVolumeSpecName: "logs") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.723147 5002 scope.go:117] "RemoveContainer" containerID="37c6c1754413201dda18005715d872051feb0d1aef179748ad24d74f8d9e9696" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.729978 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e408ec9-5662-43a6-93fd-9fa7a60f98db-kube-api-access-896gt" (OuterVolumeSpecName: "kube-api-access-896gt") pod "4e408ec9-5662-43a6-93fd-9fa7a60f98db" (UID: "4e408ec9-5662-43a6-93fd-9fa7a60f98db"). InnerVolumeSpecName "kube-api-access-896gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.730877 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.732635 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "bea5a03b-519f-4af4-873a-e5e7f9e8f769" (UID: "bea5a03b-519f-4af4-873a-e5e7f9e8f769"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.733905 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-kube-api-access-8wrgn" (OuterVolumeSpecName: "kube-api-access-8wrgn") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "kube-api-access-8wrgn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.825363 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e408ec9-5662-43a6-93fd-9fa7a60f98db-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.825390 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-896gt\" (UniqueName: \"kubernetes.io/projected/4e408ec9-5662-43a6-93fd-9fa7a60f98db-kube-api-access-896gt\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.825400 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wrgn\" (UniqueName: \"kubernetes.io/projected/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-kube-api-access-8wrgn\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.825409 5002 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.825425 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.825435 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.869030 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b576f304-fd7e-419e-937c-dafaf1c00970" (UID: "b576f304-fd7e-419e-937c-dafaf1c00970"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.869864 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bea5a03b-519f-4af4-873a-e5e7f9e8f769" (UID: "bea5a03b-519f-4af4-873a-e5e7f9e8f769"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.926804 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.926831 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.927739 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b576f304-fd7e-419e-937c-dafaf1c00970" (UID: "b576f304-fd7e-419e-937c-dafaf1c00970"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.934244 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-config-data" (OuterVolumeSpecName: "config-data") pod "d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.936981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-config-data" (OuterVolumeSpecName: "config-data") pod "b576f304-fd7e-419e-937c-dafaf1c00970" (UID: "b576f304-fd7e-419e-937c-dafaf1c00970"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.955839 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "bea5a03b-519f-4af4-873a-e5e7f9e8f769" (UID: "bea5a03b-519f-4af4-873a-e5e7f9e8f769"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.959623 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e408ec9-5662-43a6-93fd-9fa7a60f98db" (UID: "4e408ec9-5662-43a6-93fd-9fa7a60f98db"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.966051 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:05 crc kubenswrapper[5002]: I1203 16:56:05.991482 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.001640 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.002074 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b576f304-fd7e-419e-937c-dafaf1c00970" (UID: "b576f304-fd7e-419e-937c-dafaf1c00970"). 
InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.027439 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028445 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028470 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028481 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028491 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028500 5002 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/bea5a03b-519f-4af4-873a-e5e7f9e8f769-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028510 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028520 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028531 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028539 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.028546 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b576f304-fd7e-419e-937c-dafaf1c00970-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.040790 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod 
"d105ad9d-fbca-4a0c-b188-a88a363756c2" (UID: "d105ad9d-fbca-4a0c-b188-a88a363756c2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.093425 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.109472 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data" (OuterVolumeSpecName: "config-data") pod "55e94451-ebc7-4a6c-9927-df89ae0fc3c2" (UID: "55e94451-ebc7-4a6c-9927-df89ae0fc3c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.120969 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-config-data" (OuterVolumeSpecName: "config-data") pod "4e408ec9-5662-43a6-93fd-9fa7a60f98db" (UID: "4e408ec9-5662-43a6-93fd-9fa7a60f98db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.122592 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d105ad9d-fbca-4a0c-b188-a88a363756c2","Type":"ContainerDied","Data":"4dd01f4f0edb9fffdc14edcd8409635fe351d4bce2c3fbc1b656dc5ffe6a54ad"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.122711 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.135431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b576f304-fd7e-419e-937c-dafaf1c00970","Type":"ContainerDied","Data":"7ecdd1c601083ca7243db68655892feaf80e4f21bb99ba10c65f800bc452a956"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.135530 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.144168 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi8fda-account-delete-h8c2d" event={"ID":"33f61f67-5ef5-41a9-8bea-3335115b78e9","Type":"ContainerDied","Data":"4af45ecb2aea8e64aa1dffce985c414bfe8d6d653169e479a7bc596c1f30b422"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.144215 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4af45ecb2aea8e64aa1dffce985c414bfe8d6d653169e479a7bc596c1f30b422" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.144869 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.144963 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55e94451-ebc7-4a6c-9927-df89ae0fc3c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.145053 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d105ad9d-fbca-4a0c-b188-a88a363756c2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.145124 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.164112 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb96b-account-delete-ttg76" event={"ID":"13c5aef5-d2f5-4449-8cce-125cdf61d06b","Type":"ContainerDied","Data":"3d8d5655e5d3b4c3dfc185dad2f1771c6d25c64a8ed411c48a49d9be4abde856"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.164152 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d8d5655e5d3b4c3dfc185dad2f1771c6d25c64a8ed411c48a49d9be4abde856" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.179337 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4e408ec9-5662-43a6-93fd-9fa7a60f98db","Type":"ContainerDied","Data":"bbe2261e50861c45e2046d54d52377830a569545ea180eed78bd4c9bab1a1985"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.179441 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.181821 5002 generic.go:334] "Generic (PLEG): container finished" podID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" containerID="fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37" exitCode=0 Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.181896 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7","Type":"ContainerDied","Data":"fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.181935 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7","Type":"ContainerDied","Data":"66f67ba3873bdf4919add8f6b9e00a994f2feb3122f9ada2e6fc4a7f619ff451"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.181947 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66f67ba3873bdf4919add8f6b9e00a994f2feb3122f9ada2e6fc4a7f619ff451" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.182352 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="922c692b-3d5c-45df-862d-d4e08b06fe0b" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.193:6080/vnc_lite.html\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.184824 5002 generic.go:334] "Generic (PLEG): container finished" podID="36469a67-4d79-419f-9aaf-a1c128132287" containerID="4135f456101b8d8bc1d6aa3e05e38d4e5f0009555d8fd482836038f10a45877a" exitCode=0 Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.184870 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"36469a67-4d79-419f-9aaf-a1c128132287","Type":"ContainerDied","Data":"4135f456101b8d8bc1d6aa3e05e38d4e5f0009555d8fd482836038f10a45877a"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.184888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"36469a67-4d79-419f-9aaf-a1c128132287","Type":"ContainerDied","Data":"2d492b8022017b9dbc249e071e8d5a9308210ceff225b48a8d905e35c20c28a8"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.184899 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d492b8022017b9dbc249e071e8d5a9308210ceff225b48a8d905e35c20c28a8" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.193568 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a","Type":"ContainerDied","Data":"39344bf2d53f53c9398beed65520ac747f53c24078d65bec6de505a9dcccfc9c"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.193598 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39344bf2d53f53c9398beed65520ac747f53c24078d65bec6de505a9dcccfc9c" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.196192 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b89c68cbb-nkz44" event={"ID":"55e94451-ebc7-4a6c-9927-df89ae0fc3c2","Type":"ContainerDied","Data":"f37222b16941070545bc5cdbfabb844500ba2cc8fe4270bf53f73f00520a5183"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.196284 
5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b89c68cbb-nkz44" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.210759 5002 scope.go:117] "RemoveContainer" containerID="66caf906a5bd8b8162200583f4b593bcda499f2ebcd4fa929fb768cd57da8948" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.211454 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.216545 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bea5a03b-519f-4af4-873a-e5e7f9e8f769","Type":"ContainerDied","Data":"37c8a1a7bef3f640b4ba04007ab8fe123e7dd234e6143f92b901a859c32cfca3"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.216622 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.221907 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.227475 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5ccf-account-delete-j49bd" event={"ID":"de753fc7-23ae-4680-9d4c-11e5632d749d","Type":"ContainerDied","Data":"7493a23574d5d7c151518152bfe01d7eeb977c23a3f2b88081d2bf4a38f98d94"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.227553 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7493a23574d5d7c151518152bfe01d7eeb977c23a3f2b88081d2bf4a38f98d94" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.227701 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder5ccf-account-delete-j49bd" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.231323 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.238324 5002 generic.go:334] "Generic (PLEG): container finished" podID="852cfff4-0855-40ab-a82d-b560c37118bf" containerID="2ba9a566a71354e8ef6bda7b9341c58e4f1b4fb5649b671ff2b3c91f0cb5d63d" exitCode=0 Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.238419 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glancea518-account-delete-tg6nl" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.238849 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"852cfff4-0855-40ab-a82d-b560c37118bf","Type":"ContainerDied","Data":"2ba9a566a71354e8ef6bda7b9341c58e4f1b4fb5649b671ff2b3c91f0cb5d63d"} Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.239053 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6495d47864-tf6dm" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.239573 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell0be01-account-delete-r7zk7" podUID="721437ce-17b0-434b-9604-197f795ed1d9" containerName="mariadb-account-delete" containerID="cri-o://e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f" gracePeriod=30 Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.244428 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.250579 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4e408ec9-5662-43a6-93fd-9fa7a60f98db" (UID: "4e408ec9-5662-43a6-93fd-9fa7a60f98db"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.259409 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.261569 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.263506 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b89c68cbb-nkz44"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.270797 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6b89c68cbb-nkz44"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.271064 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-config-data" (OuterVolumeSpecName: "config-data") pod "1c099352-abbe-4c3a-9431-c854e5333420" (UID: "1c099352-abbe-4c3a-9431-c854e5333420"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.272071 5002 scope.go:117] "RemoveContainer" containerID="2c7e91463c94f3cc50714dbf46be64e0f5e71d87a2dbb56ce0212f820db59c76" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.275960 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.294843 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.309621 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.323220 5002 scope.go:117] "RemoveContainer" containerID="7289b73f03ab749a5e919fc53efa0fd71d6720a44c71c21cabbcd8b11406b07e" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.323714 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.329249 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.349980 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgd8t\" (UniqueName: \"kubernetes.io/projected/36469a67-4d79-419f-9aaf-a1c128132287-kube-api-access-dgd8t\") pod \"36469a67-4d79-419f-9aaf-a1c128132287\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350024 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36469a67-4d79-419f-9aaf-a1c128132287-etc-machine-id\") pod \"36469a67-4d79-419f-9aaf-a1c128132287\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350139 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-scripts\") pod \"36469a67-4d79-419f-9aaf-a1c128132287\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350219 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de753fc7-23ae-4680-9d4c-11e5632d749d-operator-scripts\") pod \"de753fc7-23ae-4680-9d4c-11e5632d749d\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350454 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-combined-ca-bundle\") pod \"36469a67-4d79-419f-9aaf-a1c128132287\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350518 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13c5aef5-d2f5-4449-8cce-125cdf61d06b-operator-scripts\") pod \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350582 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data-custom\") pod \"36469a67-4d79-419f-9aaf-a1c128132287\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350599 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f61f67-5ef5-41a9-8bea-3335115b78e9-operator-scripts\") pod \"33f61f67-5ef5-41a9-8bea-3335115b78e9\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350623 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdrx2\" (UniqueName: \"kubernetes.io/projected/33f61f67-5ef5-41a9-8bea-3335115b78e9-kube-api-access-kdrx2\") pod \"33f61f67-5ef5-41a9-8bea-3335115b78e9\" (UID: \"33f61f67-5ef5-41a9-8bea-3335115b78e9\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350659 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmcjw\" (UniqueName: 
\"kubernetes.io/projected/13c5aef5-d2f5-4449-8cce-125cdf61d06b-kube-api-access-bmcjw\") pod \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\" (UID: \"13c5aef5-d2f5-4449-8cce-125cdf61d06b\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350683 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data\") pod \"36469a67-4d79-419f-9aaf-a1c128132287\" (UID: \"36469a67-4d79-419f-9aaf-a1c128132287\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.350700 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26lhf\" (UniqueName: \"kubernetes.io/projected/de753fc7-23ae-4680-9d4c-11e5632d749d-kube-api-access-26lhf\") pod \"de753fc7-23ae-4680-9d4c-11e5632d749d\" (UID: \"de753fc7-23ae-4680-9d4c-11e5632d749d\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.351185 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e408ec9-5662-43a6-93fd-9fa7a60f98db-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.351202 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.351212 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c099352-abbe-4c3a-9431-c854e5333420-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.354224 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13c5aef5-d2f5-4449-8cce-125cdf61d06b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13c5aef5-d2f5-4449-8cce-125cdf61d06b" (UID: "13c5aef5-d2f5-4449-8cce-125cdf61d06b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.355768 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36469a67-4d79-419f-9aaf-a1c128132287-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "36469a67-4d79-419f-9aaf-a1c128132287" (UID: "36469a67-4d79-419f-9aaf-a1c128132287"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.356196 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33f61f67-5ef5-41a9-8bea-3335115b78e9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33f61f67-5ef5-41a9-8bea-3335115b78e9" (UID: "33f61f67-5ef5-41a9-8bea-3335115b78e9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.356330 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de753fc7-23ae-4680-9d4c-11e5632d749d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de753fc7-23ae-4680-9d4c-11e5632d749d" (UID: "de753fc7-23ae-4680-9d4c-11e5632d749d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.356717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de753fc7-23ae-4680-9d4c-11e5632d749d-kube-api-access-26lhf" (OuterVolumeSpecName: "kube-api-access-26lhf") pod "de753fc7-23ae-4680-9d4c-11e5632d749d" (UID: "de753fc7-23ae-4680-9d4c-11e5632d749d"). InnerVolumeSpecName "kube-api-access-26lhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.360481 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-scripts" (OuterVolumeSpecName: "scripts") pod "36469a67-4d79-419f-9aaf-a1c128132287" (UID: "36469a67-4d79-419f-9aaf-a1c128132287"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.361683 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "36469a67-4d79-419f-9aaf-a1c128132287" (UID: "36469a67-4d79-419f-9aaf-a1c128132287"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.361731 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36469a67-4d79-419f-9aaf-a1c128132287-kube-api-access-dgd8t" (OuterVolumeSpecName: "kube-api-access-dgd8t") pod "36469a67-4d79-419f-9aaf-a1c128132287" (UID: "36469a67-4d79-419f-9aaf-a1c128132287"). InnerVolumeSpecName "kube-api-access-dgd8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.362438 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33f61f67-5ef5-41a9-8bea-3335115b78e9-kube-api-access-kdrx2" (OuterVolumeSpecName: "kube-api-access-kdrx2") pod "33f61f67-5ef5-41a9-8bea-3335115b78e9" (UID: "33f61f67-5ef5-41a9-8bea-3335115b78e9"). InnerVolumeSpecName "kube-api-access-kdrx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.363114 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.366687 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13c5aef5-d2f5-4449-8cce-125cdf61d06b-kube-api-access-bmcjw" (OuterVolumeSpecName: "kube-api-access-bmcjw") pod "13c5aef5-d2f5-4449-8cce-125cdf61d06b" (UID: "13c5aef5-d2f5-4449-8cce-125cdf61d06b"). InnerVolumeSpecName "kube-api-access-bmcjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.371183 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.371379 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.380627 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.385052 5002 scope.go:117] "RemoveContainer" containerID="344937d693aca613f5d6c7658a05a5e864b67f0cdbdd5fe03d3655235754d316" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.387706 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.417617 5002 scope.go:117] "RemoveContainer" containerID="c338030fdc0b9dd2210114f065d62d29e7ab549361d1a0383112a39e58c2dc38" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.450727 5002 scope.go:117] "RemoveContainer" containerID="a5c953152a2dc657ee61cb4f66aa54e6b56055c989e05626225aa5ceeb69b1b2" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451008 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36469a67-4d79-419f-9aaf-a1c128132287" (UID: "36469a67-4d79-419f-9aaf-a1c128132287"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451838 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf66x\" (UniqueName: \"kubernetes.io/projected/a9922091-0d6f-44cf-9b98-8b97a811ea26-kube-api-access-mf66x\") pod \"a9922091-0d6f-44cf-9b98-8b97a811ea26\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451867 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-combined-ca-bundle\") pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451893 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-config-data\") pod \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451916 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9922091-0d6f-44cf-9b98-8b97a811ea26-operator-scripts\") pod \"a9922091-0d6f-44cf-9b98-8b97a811ea26\" (UID: \"a9922091-0d6f-44cf-9b98-8b97a811ea26\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451965 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-config-data\") pod \"852cfff4-0855-40ab-a82d-b560c37118bf\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.451988 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-sg-core-conf-yaml\") pod 
\"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452004 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7jrt\" (UniqueName: \"kubernetes.io/projected/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-kube-api-access-s7jrt\") pod \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452025 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05d0c2be-6525-4ec1-bcae-e240255c970a-operator-scripts\") pod \"05d0c2be-6525-4ec1-bcae-e240255c970a\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452067 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-run-httpd\") pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452092 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-combined-ca-bundle\") pod \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\" (UID: \"4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452131 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw29s\" (UniqueName: \"kubernetes.io/projected/852cfff4-0855-40ab-a82d-b560c37118bf-kube-api-access-qw29s\") pod \"852cfff4-0855-40ab-a82d-b560c37118bf\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452155 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-ceilometer-tls-certs\") pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452187 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-kolla-config\") pod \"852cfff4-0855-40ab-a82d-b560c37118bf\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452209 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdcdk\" (UniqueName: \"kubernetes.io/projected/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-kube-api-access-xdcdk\") pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452246 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-log-httpd\") pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452268 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-scripts\") 
pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452304 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-config-data\") pod \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\" (UID: \"f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452321 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-memcached-tls-certs\") pod \"852cfff4-0855-40ab-a82d-b560c37118bf\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452351 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-combined-ca-bundle\") pod \"852cfff4-0855-40ab-a82d-b560c37118bf\" (UID: \"852cfff4-0855-40ab-a82d-b560c37118bf\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.452380 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvzmx\" (UniqueName: \"kubernetes.io/projected/05d0c2be-6525-4ec1-bcae-e240255c970a-kube-api-access-qvzmx\") pod \"05d0c2be-6525-4ec1-bcae-e240255c970a\" (UID: \"05d0c2be-6525-4ec1-bcae-e240255c970a\") " Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.453712 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9922091-0d6f-44cf-9b98-8b97a811ea26-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9922091-0d6f-44cf-9b98-8b97a811ea26" (UID: "a9922091-0d6f-44cf-9b98-8b97a811ea26"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.455010 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456327 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9922091-0d6f-44cf-9b98-8b97a811ea26-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456359 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456372 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f61f67-5ef5-41a9-8bea-3335115b78e9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456385 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdrx2\" (UniqueName: \"kubernetes.io/projected/33f61f67-5ef5-41a9-8bea-3335115b78e9-kube-api-access-kdrx2\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456400 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26lhf\" (UniqueName: \"kubernetes.io/projected/de753fc7-23ae-4680-9d4c-11e5632d749d-kube-api-access-26lhf\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456411 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmcjw\" (UniqueName: \"kubernetes.io/projected/13c5aef5-d2f5-4449-8cce-125cdf61d06b-kube-api-access-bmcjw\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456423 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456435 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgd8t\" (UniqueName: \"kubernetes.io/projected/36469a67-4d79-419f-9aaf-a1c128132287-kube-api-access-dgd8t\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456448 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36469a67-4d79-419f-9aaf-a1c128132287-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456459 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456471 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de753fc7-23ae-4680-9d4c-11e5632d749d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456482 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456493 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/13c5aef5-d2f5-4449-8cce-125cdf61d06b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: E1203 16:56:06.456555 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 03 16:56:06 crc kubenswrapper[5002]: E1203 16:56:06.456602 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data podName:e7a598f1-0f32-448c-b08f-b5b0e70f583d nodeName:}" failed. No retries permitted until 2025-12-03 16:56:14.456585137 +0000 UTC m=+1497.870407025 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data") pod "rabbitmq-cell1-server-0" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d") : configmap "rabbitmq-cell1-config-data" not found Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.456725 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "852cfff4-0855-40ab-a82d-b560c37118bf" (UID: "852cfff4-0855-40ab-a82d-b560c37118bf"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.463151 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-config-data" (OuterVolumeSpecName: "config-data") pod "852cfff4-0855-40ab-a82d-b560c37118bf" (UID: "852cfff4-0855-40ab-a82d-b560c37118bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.466352 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.468152 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05d0c2be-6525-4ec1-bcae-e240255c970a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05d0c2be-6525-4ec1-bcae-e240255c970a" (UID: "05d0c2be-6525-4ec1-bcae-e240255c970a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.470315 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05d0c2be-6525-4ec1-bcae-e240255c970a-kube-api-access-qvzmx" (OuterVolumeSpecName: "kube-api-access-qvzmx") pod "05d0c2be-6525-4ec1-bcae-e240255c970a" (UID: "05d0c2be-6525-4ec1-bcae-e240255c970a"). InnerVolumeSpecName "kube-api-access-qvzmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.473333 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9922091-0d6f-44cf-9b98-8b97a811ea26-kube-api-access-mf66x" (OuterVolumeSpecName: "kube-api-access-mf66x") pod "a9922091-0d6f-44cf-9b98-8b97a811ea26" (UID: "a9922091-0d6f-44cf-9b98-8b97a811ea26"). 
InnerVolumeSpecName "kube-api-access-mf66x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.473381 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-kube-api-access-xdcdk" (OuterVolumeSpecName: "kube-api-access-xdcdk") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "kube-api-access-xdcdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.473407 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/852cfff4-0855-40ab-a82d-b560c37118bf-kube-api-access-qw29s" (OuterVolumeSpecName: "kube-api-access-qw29s") pod "852cfff4-0855-40ab-a82d-b560c37118bf" (UID: "852cfff4-0855-40ab-a82d-b560c37118bf"). InnerVolumeSpecName "kube-api-access-qw29s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.473430 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-kube-api-access-s7jrt" (OuterVolumeSpecName: "kube-api-access-s7jrt") pod "4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" (UID: "4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7"). InnerVolumeSpecName "kube-api-access-s7jrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.478995 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-scripts" (OuterVolumeSpecName: "scripts") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.499042 5002 scope.go:117] "RemoveContainer" containerID="b860ed590860a3a1a54cd9b7b0258210275e0aeba1d0db0458d40dced1fb66a7" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.529149 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.529239 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-config-data" (OuterVolumeSpecName: "config-data") pod "4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" (UID: "4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.529698 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" (UID: "4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.558874 5002 scope.go:117] "RemoveContainer" containerID="a31812f2b35f2e5582ee415727310457b192635bb15360c9e32d188a23358eea" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571163 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571200 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw29s\" (UniqueName: \"kubernetes.io/projected/852cfff4-0855-40ab-a82d-b560c37118bf-kube-api-access-qw29s\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571220 5002 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571235 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdcdk\" (UniqueName: \"kubernetes.io/projected/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-kube-api-access-xdcdk\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571247 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571263 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571277 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvzmx\" (UniqueName: \"kubernetes.io/projected/05d0c2be-6525-4ec1-bcae-e240255c970a-kube-api-access-qvzmx\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571289 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf66x\" (UniqueName: \"kubernetes.io/projected/a9922091-0d6f-44cf-9b98-8b97a811ea26-kube-api-access-mf66x\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571300 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571315 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/852cfff4-0855-40ab-a82d-b560c37118bf-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571329 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571342 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7jrt\" (UniqueName: \"kubernetes.io/projected/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7-kube-api-access-s7jrt\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571354 5002 
reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05d0c2be-6525-4ec1-bcae-e240255c970a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.571302 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.604012 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.609216 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.615275 5002 scope.go:117] "RemoveContainer" containerID="5e6958690a01bcd45b235c0f78e4750c3aa535d1ce524a8ad09dfce393a6bba0" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.632278 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-config-data" (OuterVolumeSpecName: "config-data") pod "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" (UID: "f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.632179 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "852cfff4-0855-40ab-a82d-b560c37118bf" (UID: "852cfff4-0855-40ab-a82d-b560c37118bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.653100 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.662980 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder5ccf-account-delete-j49bd"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.668945 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "852cfff4-0855-40ab-a82d-b560c37118bf" (UID: "852cfff4-0855-40ab-a82d-b560c37118bf"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.670230 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder5ccf-account-delete-j49bd"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.672471 5002 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.672499 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.672508 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/852cfff4-0855-40ab-a82d-b560c37118bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.672517 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.672527 5002 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: E1203 16:56:06.672793 5002 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 03 16:56:06 crc kubenswrapper[5002]: E1203 16:56:06.672836 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts podName:721437ce-17b0-434b-9604-197f795ed1d9 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:08.672821609 +0000 UTC m=+1492.086643497 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts") pod "novacell0be01-account-delete-r7zk7" (UID: "721437ce-17b0-434b-9604-197f795ed1d9") : configmap "openstack-scripts" not found Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.677110 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6495d47864-tf6dm"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.681151 5002 scope.go:117] "RemoveContainer" containerID="2ac72ccfe2ed308920637f371bbf7fd278ac3de12deca33a58820b774a39eb81" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.684501 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6495d47864-tf6dm"] Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.830771 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data" (OuterVolumeSpecName: "config-data") pod "36469a67-4d79-419f-9aaf-a1c128132287" (UID: "36469a67-4d79-419f-9aaf-a1c128132287"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.857525 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c099352-abbe-4c3a-9431-c854e5333420" path="/var/lib/kubelet/pods/1c099352-abbe-4c3a-9431-c854e5333420/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.858346 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" path="/var/lib/kubelet/pods/4e408ec9-5662-43a6-93fd-9fa7a60f98db/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.859052 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" path="/var/lib/kubelet/pods/55e94451-ebc7-4a6c-9927-df89ae0fc3c2/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.865393 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" path="/var/lib/kubelet/pods/5a3af9fa-d550-4d97-8d54-b198f0ca6f31/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.865937 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85214ca4-e776-4b0a-893d-516243894640" path="/var/lib/kubelet/pods/85214ca4-e776-4b0a-893d-516243894640/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.866473 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" path="/var/lib/kubelet/pods/965b05ab-f8e9-485e-9f15-2160a598d8c2/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.867964 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" path="/var/lib/kubelet/pods/b132eed4-cb4d-4abc-b49a-55688686288d/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.868664 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" path="/var/lib/kubelet/pods/b576f304-fd7e-419e-937c-dafaf1c00970/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.875952 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36469a67-4d79-419f-9aaf-a1c128132287-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.876092 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bea5a03b-519f-4af4-873a-e5e7f9e8f769" path="/var/lib/kubelet/pods/bea5a03b-519f-4af4-873a-e5e7f9e8f769/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.876725 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" path="/var/lib/kubelet/pods/d105ad9d-fbca-4a0c-b188-a88a363756c2/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.877698 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de753fc7-23ae-4680-9d4c-11e5632d749d" path="/var/lib/kubelet/pods/de753fc7-23ae-4680-9d4c-11e5632d749d/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.879068 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" path="/var/lib/kubelet/pods/ec3d3d6b-b1df-4d20-8fc7-ca518ea56929/volumes" Dec 03 16:56:06 crc kubenswrapper[5002]: I1203 16:56:06.906225 5002 scope.go:117] "RemoveContainer" containerID="7782f9e82bd0b16c1ea4af876571de089d43551370a7363494d32a492da6fdb9" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.005802 
5002 scope.go:117] "RemoveContainer" containerID="4e5b334c5a3bbdfab01571abbbf387660f52e776af1bc465c880770ae52c4855" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.270788 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.311727 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.312467 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"852cfff4-0855-40ab-a82d-b560c37118bf","Type":"ContainerDied","Data":"140dc0384c83fe9ebfee94174d975f61001d65a677e4fd2c13ff06e0213532ce"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.330064 5002 scope.go:117] "RemoveContainer" containerID="2ba9a566a71354e8ef6bda7b9341c58e4f1b4fb5649b671ff2b3c91f0cb5d63d" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.338924 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.342175 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placemente6cd-account-delete-wpdgt" event={"ID":"05d0c2be-6525-4ec1-bcae-e240255c970a","Type":"ContainerDied","Data":"e2d41fb6210ddfb0fb39f3a4bf92de9fc6bad5a954c43dc2e9abead7a4532887"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.342211 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2d41fb6210ddfb0fb39f3a4bf92de9fc6bad5a954c43dc2e9abead7a4532887" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.342276 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placemente6cd-account-delete-wpdgt" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.351545 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.359679 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_29a68818-9346-4437-9527-aea9383c1a25/ovn-northd/0.log" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.359731 5002 generic.go:334] "Generic (PLEG): container finished" podID="29a68818-9346-4437-9527-aea9383c1a25" containerID="8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39" exitCode=139 Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.359808 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29a68818-9346-4437-9527-aea9383c1a25","Type":"ContainerDied","Data":"8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.363636 5002 generic.go:334] "Generic (PLEG): container finished" podID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerID="d73705cc9fa58d7cab153dcce4358e73807cf1f2ea894bb4e2604b295891a9c0" exitCode=0 Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.363703 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7a598f1-0f32-448c-b08f-b5b0e70f583d","Type":"ContainerDied","Data":"d73705cc9fa58d7cab153dcce4358e73807cf1f2ea894bb4e2604b295891a9c0"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.367485 5002 generic.go:334] "Generic (PLEG): container finished" podID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerID="76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236" exitCode=0 Dec 03 
16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.367548 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"60743dc4-9a30-4fd2-80c1-0c7427241e92","Type":"ContainerDied","Data":"76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.367572 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"60743dc4-9a30-4fd2-80c1-0c7427241e92","Type":"ContainerDied","Data":"12111a4a3befceccbaf0cdd0c492b228d6afde8467a782aedb5f6fb3a1562a1d"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.367639 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372543 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372581 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb96b-account-delete-ttg76" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372577 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican8d7a-account-delete-fx9r7" event={"ID":"a9922091-0d6f-44cf-9b98-8b97a811ea26","Type":"ContainerDied","Data":"aaeb745fd17c0c125b59345463fa15671df0259f3d508b558026629b9155ea38"} Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372621 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372635 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaeb745fd17c0c125b59345463fa15671df0259f3d508b558026629b9155ea38" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372696 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi8fda-account-delete-h8c2d" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372547 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican8d7a-account-delete-fx9r7" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.372818 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.378584 5002 scope.go:117] "RemoveContainer" containerID="76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.386663 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj2kv\" (UniqueName: \"kubernetes.io/projected/60743dc4-9a30-4fd2-80c1-0c7427241e92-kube-api-access-sj2kv\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.386760 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-kolla-config\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.386794 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-combined-ca-bundle\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.386818 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.386838 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-generated\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.387048 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-galera-tls-certs\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.387083 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-default\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.387132 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-operator-scripts\") pod \"60743dc4-9a30-4fd2-80c1-0c7427241e92\" (UID: \"60743dc4-9a30-4fd2-80c1-0c7427241e92\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.387309 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.388082 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.388130 5002 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.388567 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.389214 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.389903 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placemente6cd-account-delete-wpdgt"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.393640 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60743dc4-9a30-4fd2-80c1-0c7427241e92-kube-api-access-sj2kv" (OuterVolumeSpecName: "kube-api-access-sj2kv") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "kube-api-access-sj2kv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.408920 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "mysql-db") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.420374 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placemente6cd-account-delete-wpdgt"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.422970 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.444489 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.445621 5002 scope.go:117] "RemoveContainer" containerID="de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.457021 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.458342 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "60743dc4-9a30-4fd2-80c1-0c7427241e92" (UID: "60743dc4-9a30-4fd2-80c1-0c7427241e92"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.469112 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.484685 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.488978 5002 scope.go:117] "RemoveContainer" containerID="76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490118 5002 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490139 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490149 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60743dc4-9a30-4fd2-80c1-0c7427241e92-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490160 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj2kv\" (UniqueName: \"kubernetes.io/projected/60743dc4-9a30-4fd2-80c1-0c7427241e92-kube-api-access-sj2kv\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490169 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60743dc4-9a30-4fd2-80c1-0c7427241e92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490191 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.490200 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/60743dc4-9a30-4fd2-80c1-0c7427241e92-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: E1203 16:56:07.494044 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236\": container with ID starting with 76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236 not found: ID does not exist" containerID="76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.494097 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236"} err="failed to get container status \"76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236\": rpc error: code = NotFound desc = could not find container \"76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236\": container with ID starting with 76365eef79adf987234591f300047cc4fd1493fd5bc235b151ccb980a898d236 not found: ID does not exist" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.494129 5002 scope.go:117] "RemoveContainer" containerID="de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.494335 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: E1203 16:56:07.494528 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3\": container with ID starting with de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3 not found: ID does not exist" containerID="de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.494553 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3"} err="failed to get container status \"de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3\": rpc error: code = NotFound desc = could not find container \"de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3\": container with ID starting with de9f69bc28ad832b164dffa53cef1b38dfffdb1897a1751ab82cb482a9f7e4c3 not found: ID does not exist" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.504168 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.511480 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.511475 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi8fda-account-delete-h8c2d"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.522368 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi8fda-account-delete-h8c2d"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.535028 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican8d7a-account-delete-fx9r7"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.546619 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican8d7a-account-delete-fx9r7"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.549110 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_29a68818-9346-4437-9527-aea9383c1a25/ovn-northd/0.log" Dec 03 16:56:07 crc 
kubenswrapper[5002]: I1203 16:56:07.549178 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.560147 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb96b-account-delete-ttg76"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.564337 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronb96b-account-delete-ttg76"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.597546 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: E1203 16:56:07.656430 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:56:07 crc kubenswrapper[5002]: E1203 16:56:07.658235 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:56:07 crc kubenswrapper[5002]: E1203 16:56:07.673322 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:56:07 crc kubenswrapper[5002]: E1203 16:56:07.673396 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerName="nova-scheduler-scheduler" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698185 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-config\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698256 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-ovn-northd-tls-certs\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698336 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29a68818-9346-4437-9527-aea9383c1a25-ovn-rundir\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698391 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-combined-ca-bundle\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698465 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7dfn\" (UniqueName: \"kubernetes.io/projected/29a68818-9346-4437-9527-aea9383c1a25-kube-api-access-n7dfn\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698501 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-scripts\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698531 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-metrics-certs-tls-certs\") pod \"29a68818-9346-4437-9527-aea9383c1a25\" (UID: \"29a68818-9346-4437-9527-aea9383c1a25\") " Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698720 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-config" (OuterVolumeSpecName: "config") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.698982 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.699775 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-scripts" (OuterVolumeSpecName: "scripts") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.701828 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29a68818-9346-4437-9527-aea9383c1a25-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.702986 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a68818-9346-4437-9527-aea9383c1a25-kube-api-access-n7dfn" (OuterVolumeSpecName: "kube-api-access-n7dfn") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "kube-api-access-n7dfn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.759482 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.796027 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.800945 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7dfn\" (UniqueName: \"kubernetes.io/projected/29a68818-9346-4437-9527-aea9383c1a25-kube-api-access-n7dfn\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.800979 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29a68818-9346-4437-9527-aea9383c1a25-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.800991 5002 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.801001 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29a68818-9346-4437-9527-aea9383c1a25-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.801011 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.801988 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "29a68818-9346-4437-9527-aea9383c1a25" (UID: "29a68818-9346-4437-9527-aea9383c1a25"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.899350 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.900253 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.902381 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a68818-9346-4437-9527-aea9383c1a25-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:07 crc kubenswrapper[5002]: I1203 16:56:07.906083 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004037 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bn68v\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-kube-api-access-bn68v\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004108 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-server-conf\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004153 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-confd\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004168 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-plugins-conf\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004208 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004226 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-plugins\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004282 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-tls\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004323 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004356 5002 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-erlang-cookie\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004393 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7a598f1-0f32-448c-b08f-b5b0e70f583d-pod-info\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.004426 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7a598f1-0f32-448c-b08f-b5b0e70f583d-erlang-cookie-secret\") pod \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\" (UID: \"e7a598f1-0f32-448c-b08f-b5b0e70f583d\") " Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.004776 5002 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.004824 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data podName:382d6556-c45b-43dd-a4fa-16b3e91e0725 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:16.004810013 +0000 UTC m=+1499.418631901 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data") pod "rabbitmq-server-0" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725") : configmap "rabbitmq-config-data" not found Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.009724 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.009974 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-kube-api-access-bn68v" (OuterVolumeSpecName: "kube-api-access-bn68v") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "kube-api-access-bn68v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.010969 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.011194 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). 
InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.012724 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.015112 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7a598f1-0f32-448c-b08f-b5b0e70f583d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.016951 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e7a598f1-0f32-448c-b08f-b5b0e70f583d-pod-info" (OuterVolumeSpecName: "pod-info") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.019285 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.031037 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.031258 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.038788 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data" (OuterVolumeSpecName: "config-data") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.087419 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-server-conf" (OuterVolumeSpecName: "server-conf") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.106881 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107084 5002 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7a598f1-0f32-448c-b08f-b5b0e70f583d-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107185 5002 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7a598f1-0f32-448c-b08f-b5b0e70f583d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107514 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bn68v\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-kube-api-access-bn68v\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107590 5002 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107641 5002 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107704 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7a598f1-0f32-448c-b08f-b5b0e70f583d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107777 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107830 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.107914 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.118780 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e7a598f1-0f32-448c-b08f-b5b0e70f583d" (UID: "e7a598f1-0f32-448c-b08f-b5b0e70f583d"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.127709 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209613 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-scripts\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209652 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-combined-ca-bundle\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209680 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-public-tls-certs\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209701 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-plugins\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209733 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/382d6556-c45b-43dd-a4fa-16b3e91e0725-pod-info\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209773 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sl2pp\" (UniqueName: \"kubernetes.io/projected/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-kube-api-access-sl2pp\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-credential-keys\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209872 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-plugins-conf\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209893 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz7gh\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-kube-api-access-tz7gh\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209909 5002 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209942 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-server-conf\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.209960 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-erlang-cookie\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210007 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-fernet-keys\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210034 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210058 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-tls\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210072 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-config-data\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210099 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/382d6556-c45b-43dd-a4fa-16b3e91e0725-erlang-cookie-secret\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210126 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-confd\") pod \"382d6556-c45b-43dd-a4fa-16b3e91e0725\" (UID: \"382d6556-c45b-43dd-a4fa-16b3e91e0725\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.210143 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-internal-tls-certs\") pod \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\" (UID: \"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0\") " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.211051 5002 reconciler_common.go:293] "Volume detached for 
volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7a598f1-0f32-448c-b08f-b5b0e70f583d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.211079 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.211350 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.211638 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.214891 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.215830 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-kube-api-access-tz7gh" (OuterVolumeSpecName: "kube-api-access-tz7gh") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "kube-api-access-tz7gh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.217345 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-scripts" (OuterVolumeSpecName: "scripts") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.219970 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.221092 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.225938 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/382d6556-c45b-43dd-a4fa-16b3e91e0725-pod-info" (OuterVolumeSpecName: "pod-info") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.225963 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.225999 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.228035 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-kube-api-access-sl2pp" (OuterVolumeSpecName: "kube-api-access-sl2pp") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "kube-api-access-sl2pp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.229422 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/382d6556-c45b-43dd-a4fa-16b3e91e0725-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.232639 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data" (OuterVolumeSpecName: "config-data") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.252109 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-server-conf" (OuterVolumeSpecName: "server-conf") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.253873 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-config-data" (OuterVolumeSpecName: "config-data") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.262298 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.264992 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.279986 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" (UID: "dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312485 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312520 5002 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312534 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz7gh\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-kube-api-access-tz7gh\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312573 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312586 5002 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312599 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312611 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312622 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/382d6556-c45b-43dd-a4fa-16b3e91e0725-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc 
kubenswrapper[5002]: I1203 16:56:08.312633 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312644 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312656 5002 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/382d6556-c45b-43dd-a4fa-16b3e91e0725-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312669 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312679 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312691 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312702 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312713 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312725 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sl2pp\" (UniqueName: \"kubernetes.io/projected/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0-kube-api-access-sl2pp\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.312736 5002 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/382d6556-c45b-43dd-a4fa-16b3e91e0725-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.314973 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "382d6556-c45b-43dd-a4fa-16b3e91e0725" (UID: "382d6556-c45b-43dd-a4fa-16b3e91e0725"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.334011 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.399901 5002 generic.go:334] "Generic (PLEG): container finished" podID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerID="4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8" exitCode=0 Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.400044 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"382d6556-c45b-43dd-a4fa-16b3e91e0725","Type":"ContainerDied","Data":"4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8"} Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.400103 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"382d6556-c45b-43dd-a4fa-16b3e91e0725","Type":"ContainerDied","Data":"d2f08efd4e97d1e3035b6d1be2e152b6fe41a7383fa709fefc5c3a2004dfbffd"} Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.400122 5002 scope.go:117] "RemoveContainer" containerID="4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.400362 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.405799 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_29a68818-9346-4437-9527-aea9383c1a25/ovn-northd/0.log" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.405916 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29a68818-9346-4437-9527-aea9383c1a25","Type":"ContainerDied","Data":"0f495ec01230cedb5d9aa47c7518f3e4e3ccb1c272ff505b0a451117c9eff00c"} Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.405963 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.413842 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/382d6556-c45b-43dd-a4fa-16b3e91e0725-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.413878 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.427624 5002 generic.go:334] "Generic (PLEG): container finished" podID="dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" containerID="8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455" exitCode=0 Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.427725 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-78586996b8-nkxdr" event={"ID":"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0","Type":"ContainerDied","Data":"8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455"} Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.427778 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-78586996b8-nkxdr" event={"ID":"dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0","Type":"ContainerDied","Data":"fcac53e53514bac34b47de2b8095708950f6f8629b8f5359d65cae13b538d286"} Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.427816 5002 scope.go:117] "RemoveContainer" containerID="4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.427969 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-78586996b8-nkxdr" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.438356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7a598f1-0f32-448c-b08f-b5b0e70f583d","Type":"ContainerDied","Data":"d07315ea7de4f54cb5c2b58d32056f7b796e0c7d326364b64e451ae7e2db2f19"} Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.438502 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.464616 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.482077 5002 scope.go:117] "RemoveContainer" containerID="4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8" Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.498828 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8\": container with ID starting with 4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8 not found: ID does not exist" containerID="4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.498950 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8"} err="failed to get container status \"4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8\": rpc error: code = NotFound desc = could not find container \"4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8\": container with ID starting with 4cd128117c8f2717be487655dd7e428999c0f8579dc3324f59cf7f90e6f493b8 not found: ID does not exist" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.499008 5002 scope.go:117] "RemoveContainer" containerID="4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919" Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.499452 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919\": container with ID starting with 4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919 not found: ID does not exist" containerID="4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.499510 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919"} err="failed to get container status \"4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919\": rpc error: code = NotFound desc = could not find container \"4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919\": container with ID starting with 4ea3eb5ddafb1a5617812d42da8c949aec8550f891409d4712a4994fb7e7c919 not found: ID does not exist" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.499543 5002 scope.go:117] "RemoveContainer" containerID="d01d9155c2a1be73fba2d20cfcf3edb5d0d73c8658491e3d1015b359c087e0ad" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.513960 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.532352 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.548260 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.550484 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-78586996b8-nkxdr"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.560720 5002 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-78586996b8-nkxdr"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.571804 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.576215 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.578545 5002 scope.go:117] "RemoveContainer" containerID="8d6e5fcf1f9f04b8b697e12bf0ae6fa6a1d7ab6a6a2509530bdd7448a85fdb39" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.584057 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-gfsqx"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.593584 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-gfsqx"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.610573 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancea518-account-delete-tg6nl"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.613469 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-a518-account-create-update-d86mt"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.620132 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glancea518-account-delete-tg6nl"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.627177 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-a518-account-create-update-d86mt"] Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.636571 5002 scope.go:117] "RemoveContainer" containerID="8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.677398 5002 scope.go:117] "RemoveContainer" containerID="8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455" Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.678630 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455\": container with ID starting with 8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455 not found: ID does not exist" containerID="8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.678678 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455"} err="failed to get container status \"8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455\": rpc error: code = NotFound desc = could not find container \"8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455\": container with ID starting with 8b76a1f377cb01c24a0515313390a455612297d0938e83ff0b083343cfcf5455 not found: ID does not exist" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.678711 5002 scope.go:117] "RemoveContainer" containerID="d73705cc9fa58d7cab153dcce4358e73807cf1f2ea894bb4e2604b295891a9c0" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.707209 5002 scope.go:117] "RemoveContainer" containerID="5caff8192bc34681b3f0760b1b195bcfacf6add52d3a669e3d1acea1cb2ca939" Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.718921 5002 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 03 
16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.718999 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts podName:721437ce-17b0-434b-9604-197f795ed1d9 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:12.71897971 +0000 UTC m=+1496.132801598 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts") pod "novacell0be01-account-delete-r7zk7" (UID: "721437ce-17b0-434b-9604-197f795ed1d9") : configmap "openstack-scripts" not found Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.860390 5002 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 03 16:56:08 crc kubenswrapper[5002]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-03T16:56:01Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 03 16:56:08 crc kubenswrapper[5002]: /etc/init.d/functions: line 589: 393 Alarm clock "$@" Dec 03 16:56:08 crc kubenswrapper[5002]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-hnkdk" message=< Dec 03 16:56:08 crc kubenswrapper[5002]: Exiting ovn-controller (1) [FAILED] Dec 03 16:56:08 crc kubenswrapper[5002]: Killing ovn-controller (1) [ OK ] Dec 03 16:56:08 crc kubenswrapper[5002]: Killing ovn-controller (1) with SIGKILL [ OK ] Dec 03 16:56:08 crc kubenswrapper[5002]: 2025-12-03T16:56:01Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 03 16:56:08 crc kubenswrapper[5002]: /etc/init.d/functions: line 589: 393 Alarm clock "$@" Dec 03 16:56:08 crc kubenswrapper[5002]: > Dec 03 16:56:08 crc kubenswrapper[5002]: E1203 16:56:08.860435 5002 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 03 16:56:08 crc kubenswrapper[5002]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-03T16:56:01Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 03 16:56:08 crc kubenswrapper[5002]: /etc/init.d/functions: line 589: 393 Alarm clock "$@" Dec 03 16:56:08 crc kubenswrapper[5002]: > pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" containerID="cri-o://aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.860477 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" containerID="cri-o://aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" gracePeriod=22 Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.867804 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05d0c2be-6525-4ec1-bcae-e240255c970a" path="/var/lib/kubelet/pods/05d0c2be-6525-4ec1-bcae-e240255c970a/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.885186 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13c5aef5-d2f5-4449-8cce-125cdf61d06b" path="/var/lib/kubelet/pods/13c5aef5-d2f5-4449-8cce-125cdf61d06b/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.886082 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a68818-9346-4437-9527-aea9383c1a25" path="/var/lib/kubelet/pods/29a68818-9346-4437-9527-aea9383c1a25/volumes" 
Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.887051 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33f61f67-5ef5-41a9-8bea-3335115b78e9" path="/var/lib/kubelet/pods/33f61f67-5ef5-41a9-8bea-3335115b78e9/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.887591 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36469a67-4d79-419f-9aaf-a1c128132287" path="/var/lib/kubelet/pods/36469a67-4d79-419f-9aaf-a1c128132287/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.889120 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" path="/var/lib/kubelet/pods/382d6556-c45b-43dd-a4fa-16b3e91e0725/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.900170 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" path="/var/lib/kubelet/pods/4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.904973 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" path="/var/lib/kubelet/pods/60743dc4-9a30-4fd2-80c1-0c7427241e92/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.906680 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75d1ca49-ec07-400e-89fd-cb277e813e98" path="/var/lib/kubelet/pods/75d1ca49-ec07-400e-89fd-cb277e813e98/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.907511 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="852cfff4-0855-40ab-a82d-b560c37118bf" path="/var/lib/kubelet/pods/852cfff4-0855-40ab-a82d-b560c37118bf/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.909565 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9922091-0d6f-44cf-9b98-8b97a811ea26" path="/var/lib/kubelet/pods/a9922091-0d6f-44cf-9b98-8b97a811ea26/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.911025 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b220588a-2f7f-4761-a3bc-d0021162cb10" path="/var/lib/kubelet/pods/b220588a-2f7f-4761-a3bc-d0021162cb10/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.911694 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc3c0d8b-823d-42bc-a114-766494075e59" path="/var/lib/kubelet/pods/bc3c0d8b-823d-42bc-a114-766494075e59/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.912543 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" path="/var/lib/kubelet/pods/dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.916035 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" path="/var/lib/kubelet/pods/e7a598f1-0f32-448c-b08f-b5b0e70f583d/volumes" Dec 03 16:56:08 crc kubenswrapper[5002]: I1203 16:56:08.917849 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" path="/var/lib/kubelet/pods/f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a/volumes" Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.146881 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa is running failed: 
container process not found" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.150328 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa is running failed: container process not found" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.151033 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa is running failed: container process not found" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.151064 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-hnkdk" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.154684 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.240211 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.241444 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.241791 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.241822 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" probeType="Readiness" 
pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.242368 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.252604 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.254689 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.254775 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.284723 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hnkdk_2e876c11-14f1-4e51-90a1-e2cdddc08c87/ovn-controller/0.log" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.284822 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hnkdk" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.341064 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz7g2\" (UniqueName: \"kubernetes.io/projected/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-kube-api-access-wz7g2\") pod \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.341150 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data-custom\") pod \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.341193 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-combined-ca-bundle\") pod \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.342250 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.342300 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-combined-ca-bundle\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.342350 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data\") pod \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.342368 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-logs\") pod \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\" (UID: \"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.342664 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run" (OuterVolumeSpecName: "var-run") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.343024 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-logs" (OuterVolumeSpecName: "logs") pod "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" (UID: "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.347374 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" (UID: "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.358412 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-kube-api-access-wz7g2" (OuterVolumeSpecName: "kube-api-access-wz7g2") pod "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" (UID: "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f"). InnerVolumeSpecName "kube-api-access-wz7g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.373068 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" (UID: "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.379173 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.427405 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data" (OuterVolumeSpecName: "config-data") pod "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" (UID: "40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.445169 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l8ll\" (UniqueName: \"kubernetes.io/projected/2e876c11-14f1-4e51-90a1-e2cdddc08c87-kube-api-access-5l8ll\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.448400 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e876c11-14f1-4e51-90a1-e2cdddc08c87-kube-api-access-5l8ll" (OuterVolumeSpecName: "kube-api-access-5l8ll") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "kube-api-access-5l8ll". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.449025 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-log-ovn\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.449211 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e876c11-14f1-4e51-90a1-e2cdddc08c87-scripts\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.449564 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.449805 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run-ovn\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.450150 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-ovn-controller-tls-certs\") pod \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\" (UID: \"2e876c11-14f1-4e51-90a1-e2cdddc08c87\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.450158 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.451136 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e876c11-14f1-4e51-90a1-e2cdddc08c87-scripts" (OuterVolumeSpecName: "scripts") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.451855 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.451943 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452031 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz7g2\" (UniqueName: \"kubernetes.io/projected/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-kube-api-access-wz7g2\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452109 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l8ll\" (UniqueName: \"kubernetes.io/projected/2e876c11-14f1-4e51-90a1-e2cdddc08c87-kube-api-access-5l8ll\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452182 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452292 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452366 5002 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452445 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e876c11-14f1-4e51-90a1-e2cdddc08c87-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452522 5002 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452665 5002 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2e876c11-14f1-4e51-90a1-e2cdddc08c87-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.452905 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.456513 5002 generic.go:334] "Generic (PLEG): container finished" podID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerID="8a095cbc28aea7f906ad627bf6f7c5227b893239574390ebaaeddf3c3b84026c" exitCode=0 Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.456608 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" event={"ID":"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9","Type":"ContainerDied","Data":"8a095cbc28aea7f906ad627bf6f7c5227b893239574390ebaaeddf3c3b84026c"} Dec 
03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.460381 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86921476-d5b9-4fc0-86d1-aa82dd931e5f","Type":"ContainerDied","Data":"9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b"} Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.460297 5002 generic.go:334] "Generic (PLEG): container finished" podID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" exitCode=0 Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.462425 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.466223 5002 generic.go:334] "Generic (PLEG): container finished" podID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerID="58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544" exitCode=0 Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.466443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" event={"ID":"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f","Type":"ContainerDied","Data":"58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544"} Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.466501 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" event={"ID":"40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f","Type":"ContainerDied","Data":"bb591d2b9b74576585ab2eff0ecda2532a6b4df7a8b8d63f3ba4bd6de57ffa72"} Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.466553 5002 scope.go:117] "RemoveContainer" containerID="58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.466622 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-598cb58b8b-ww24g" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.471395 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hnkdk_2e876c11-14f1-4e51-90a1-e2cdddc08c87/ovn-controller/0.log" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.471438 5002 generic.go:334] "Generic (PLEG): container finished" podID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" exitCode=137 Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.471491 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk" event={"ID":"2e876c11-14f1-4e51-90a1-e2cdddc08c87","Type":"ContainerDied","Data":"aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa"} Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.471557 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hnkdk" event={"ID":"2e876c11-14f1-4e51-90a1-e2cdddc08c87","Type":"ContainerDied","Data":"5e65b7c3398269db4275ffac45374421277a1470bce66ee06a8533a0f24586b4"} Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.471616 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hnkdk" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.504226 5002 scope.go:117] "RemoveContainer" containerID="7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.516645 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "2e876c11-14f1-4e51-90a1-e2cdddc08c87" (UID: "2e876c11-14f1-4e51-90a1-e2cdddc08c87"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.546144 5002 scope.go:117] "RemoveContainer" containerID="58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544" Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.546851 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544\": container with ID starting with 58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544 not found: ID does not exist" containerID="58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.546893 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544"} err="failed to get container status \"58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544\": rpc error: code = NotFound desc = could not find container \"58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544\": container with ID starting with 58bfd6a627bdd6047b308b18ef05a861116dc5dc4c60d76a4791a8676bb9c544 not found: ID does not exist" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.547026 5002 scope.go:117] "RemoveContainer" containerID="7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7" Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.548059 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7\": container with ID starting with 7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7 not found: ID does not exist" containerID="7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.548099 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7"} err="failed to get container status \"7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7\": rpc error: code = NotFound desc = could not find container \"7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7\": container with ID starting with 7357aea451b0051e4fc395bd4960bb5dc2a6076947c742d8b017b1a7d0aae7f7 not found: ID does not exist" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.548128 5002 scope.go:117] "RemoveContainer" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.550525 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-598cb58b8b-ww24g"] Dec 03 16:56:09 
crc kubenswrapper[5002]: I1203 16:56:09.554234 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data\") pod \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.554298 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-logs\") pod \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.554366 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k92jp\" (UniqueName: \"kubernetes.io/projected/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-kube-api-access-k92jp\") pod \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.554414 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-combined-ca-bundle\") pod \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.554480 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data-custom\") pod \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\" (UID: \"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.554776 5002 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e876c11-14f1-4e51-90a1-e2cdddc08c87-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.555523 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-logs" (OuterVolumeSpecName: "logs") pod "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" (UID: "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.555967 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-598cb58b8b-ww24g"] Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.557631 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" (UID: "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.562417 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-kube-api-access-k92jp" (OuterVolumeSpecName: "kube-api-access-k92jp") pod "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" (UID: "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9"). InnerVolumeSpecName "kube-api-access-k92jp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.565142 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.573437 5002 scope.go:117] "RemoveContainer" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" Dec 03 16:56:09 crc kubenswrapper[5002]: E1203 16:56:09.574192 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa\": container with ID starting with aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa not found: ID does not exist" containerID="aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.574235 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa"} err="failed to get container status \"aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa\": rpc error: code = NotFound desc = could not find container \"aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa\": container with ID starting with aaff8f4a9939470c2f26a68a0b97470fab496714363d1dcee96075809d9b3baa not found: ID does not exist" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.586524 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" (UID: "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.605755 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data" (OuterVolumeSpecName: "config-data") pod "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" (UID: "aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656032 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-combined-ca-bundle\") pod \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656237 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whdtn\" (UniqueName: \"kubernetes.io/projected/86921476-d5b9-4fc0-86d1-aa82dd931e5f-kube-api-access-whdtn\") pod \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656269 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-config-data\") pod \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\" (UID: \"86921476-d5b9-4fc0-86d1-aa82dd931e5f\") " Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656646 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656662 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656706 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656715 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.656726 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k92jp\" (UniqueName: \"kubernetes.io/projected/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9-kube-api-access-k92jp\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.661232 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86921476-d5b9-4fc0-86d1-aa82dd931e5f-kube-api-access-whdtn" (OuterVolumeSpecName: "kube-api-access-whdtn") pod "86921476-d5b9-4fc0-86d1-aa82dd931e5f" (UID: "86921476-d5b9-4fc0-86d1-aa82dd931e5f"). InnerVolumeSpecName "kube-api-access-whdtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.673878 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86921476-d5b9-4fc0-86d1-aa82dd931e5f" (UID: "86921476-d5b9-4fc0-86d1-aa82dd931e5f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.674135 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-config-data" (OuterVolumeSpecName: "config-data") pod "86921476-d5b9-4fc0-86d1-aa82dd931e5f" (UID: "86921476-d5b9-4fc0-86d1-aa82dd931e5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.758238 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.758657 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whdtn\" (UniqueName: \"kubernetes.io/projected/86921476-d5b9-4fc0-86d1-aa82dd931e5f-kube-api-access-whdtn\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.758671 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86921476-d5b9-4fc0-86d1-aa82dd931e5f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.800901 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hnkdk"] Dec 03 16:56:09 crc kubenswrapper[5002]: I1203 16:56:09.811990 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hnkdk"] Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.264987 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.265037 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.503360 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" event={"ID":"aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9","Type":"ContainerDied","Data":"67d6bf380cb559ff2e35e537c0675fb8421a705995c5a62094cc9c0c09299a57"} Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.503400 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5dc7d59f9c-7zhx2" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.503430 5002 scope.go:117] "RemoveContainer" containerID="8a095cbc28aea7f906ad627bf6f7c5227b893239574390ebaaeddf3c3b84026c" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.504943 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86921476-d5b9-4fc0-86d1-aa82dd931e5f","Type":"ContainerDied","Data":"bb3ce16779002af79f9743393a59d7c153b54aea52b2c39b5dbb2e3c6ca54100"} Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.505031 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.550534 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.551466 5002 scope.go:117] "RemoveContainer" containerID="49cfe11824388fcd621bf5c8c3ab0301531171637a9eff8d8df877c7b70ebe69" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.555544 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.565874 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5dc7d59f9c-7zhx2"] Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.570458 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-5dc7d59f9c-7zhx2"] Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.573059 5002 scope.go:117] "RemoveContainer" containerID="9bf24b4c96cc80a30cc1370d78b90b73f20e379aa3232516e9166ca4be82531b" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.851683 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" path="/var/lib/kubelet/pods/2e876c11-14f1-4e51-90a1-e2cdddc08c87/volumes" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.852596 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" path="/var/lib/kubelet/pods/40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f/volumes" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.853384 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" path="/var/lib/kubelet/pods/86921476-d5b9-4fc0-86d1-aa82dd931e5f/volumes" Dec 03 16:56:10 crc kubenswrapper[5002]: I1203 16:56:10.854783 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" path="/var/lib/kubelet/pods/aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9/volumes" Dec 03 16:56:12 crc kubenswrapper[5002]: E1203 16:56:12.805257 5002 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 03 16:56:12 crc kubenswrapper[5002]: E1203 16:56:12.805710 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts podName:721437ce-17b0-434b-9604-197f795ed1d9 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:20.805690169 +0000 UTC m=+1504.219512057 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts") pod "novacell0be01-account-delete-r7zk7" (UID: "721437ce-17b0-434b-9604-197f795ed1d9") : configmap "openstack-scripts" not found Dec 03 16:56:12 crc kubenswrapper[5002]: I1203 16:56:12.845218 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-68bfc56b4f-vnlr5" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9696/\": dial tcp 10.217.0.150:9696: connect: connection refused" Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.231789 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.232779 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.232939 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.233321 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.233372 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.234086 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.236813 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, 
stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:14 crc kubenswrapper[5002]: E1203 16:56:14.236918 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.232406 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.234318 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.234522 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.236125 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.240199 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.240247 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.240392 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:19 crc kubenswrapper[5002]: E1203 16:56:19.240497 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:56:20 crc kubenswrapper[5002]: E1203 16:56:20.833110 5002 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 03 16:56:20 crc kubenswrapper[5002]: E1203 16:56:20.833213 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts podName:721437ce-17b0-434b-9604-197f795ed1d9 nodeName:}" failed. No retries permitted until 2025-12-03 16:56:36.833192776 +0000 UTC m=+1520.247014664 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts") pod "novacell0be01-account-delete-r7zk7" (UID: "721437ce-17b0-434b-9604-197f795ed1d9") : configmap "openstack-scripts" not found Dec 03 16:56:22 crc kubenswrapper[5002]: I1203 16:56:22.640224 5002 generic.go:334] "Generic (PLEG): container finished" podID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerID="483b4040379f35157fa205bb3c7495c4b0ffa2ae303d246e906b2954e53e03f5" exitCode=0 Dec 03 16:56:22 crc kubenswrapper[5002]: I1203 16:56:22.640316 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68bfc56b4f-vnlr5" event={"ID":"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c","Type":"ContainerDied","Data":"483b4040379f35157fa205bb3c7495c4b0ffa2ae303d246e906b2954e53e03f5"} Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.029711 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.176937 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-ovndb-tls-certs\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.177085 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmqc9\" (UniqueName: \"kubernetes.io/projected/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-kube-api-access-dmqc9\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.177179 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-public-tls-certs\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.177902 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-combined-ca-bundle\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.178143 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-httpd-config\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.178199 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-internal-tls-certs\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.178256 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-config\") pod \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\" (UID: \"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c\") " Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.182584 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-kube-api-access-dmqc9" (OuterVolumeSpecName: "kube-api-access-dmqc9") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "kube-api-access-dmqc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.183728 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.225566 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-config" (OuterVolumeSpecName: "config") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.230333 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.242699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.247487 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.251369 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" (UID: "14e81d6c-fac9-4f5c-b69f-3b26720a0f5c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281264 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmqc9\" (UniqueName: \"kubernetes.io/projected/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-kube-api-access-dmqc9\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281319 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281339 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281356 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281373 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281391 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.281407 5002 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.658644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68bfc56b4f-vnlr5" event={"ID":"14e81d6c-fac9-4f5c-b69f-3b26720a0f5c","Type":"ContainerDied","Data":"d04bfbb86d18ebb7af9ae348fb63aa74d0d5f68ca9be4717d63dcf629b3c5902"} Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.658710 5002 scope.go:117] "RemoveContainer" containerID="7319e26425e43fd3866431755e6e1859112eae7b40a15901924150e6da469063" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.658937 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-68bfc56b4f-vnlr5" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.707120 5002 scope.go:117] "RemoveContainer" containerID="483b4040379f35157fa205bb3c7495c4b0ffa2ae303d246e906b2954e53e03f5" Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.711628 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-68bfc56b4f-vnlr5"] Dec 03 16:56:23 crc kubenswrapper[5002]: I1203 16:56:23.716763 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-68bfc56b4f-vnlr5"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.231814 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.232181 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.232675 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.232710 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.234388 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.236343 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.239437 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:24 crc kubenswrapper[5002]: E1203 16:56:24.239493 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:56:24 crc kubenswrapper[5002]: I1203 16:56:24.850420 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" path="/var/lib/kubelet/pods/14e81d6c-fac9-4f5c-b69f-3b26720a0f5c/volumes" Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.231278 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.232893 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.232973 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.233987 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.234028 5002 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.234863 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.236083 5002 log.go:32] "ExecSync cmd from runtime 
service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 03 16:56:29 crc kubenswrapper[5002]: E1203 16:56:29.236140 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-jkwrt" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.750492 5002 generic.go:334] "Generic (PLEG): container finished" podID="f090a614-3703-461c-8152-226a5b53c16a" containerID="b8210908d5d09bcdac92e5295a766ab5588450583f98221265c5dc1fcef219e0" exitCode=137 Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.750614 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"b8210908d5d09bcdac92e5295a766ab5588450583f98221265c5dc1fcef219e0"} Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.864296 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.994501 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") pod \"f090a614-3703-461c-8152-226a5b53c16a\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.994544 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz5kk\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-kube-api-access-lz5kk\") pod \"f090a614-3703-461c-8152-226a5b53c16a\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.994567 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-cache\") pod \"f090a614-3703-461c-8152-226a5b53c16a\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.994720 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f090a614-3703-461c-8152-226a5b53c16a\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.994766 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-lock\") pod \"f090a614-3703-461c-8152-226a5b53c16a\" (UID: \"f090a614-3703-461c-8152-226a5b53c16a\") " Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.995402 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-lock" (OuterVolumeSpecName: "lock") pod "f090a614-3703-461c-8152-226a5b53c16a" (UID: "f090a614-3703-461c-8152-226a5b53c16a"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:30 crc kubenswrapper[5002]: I1203 16:56:30.995917 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-cache" (OuterVolumeSpecName: "cache") pod "f090a614-3703-461c-8152-226a5b53c16a" (UID: "f090a614-3703-461c-8152-226a5b53c16a"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.000434 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-kube-api-access-lz5kk" (OuterVolumeSpecName: "kube-api-access-lz5kk") pod "f090a614-3703-461c-8152-226a5b53c16a" (UID: "f090a614-3703-461c-8152-226a5b53c16a"). InnerVolumeSpecName "kube-api-access-lz5kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.000544 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "f090a614-3703-461c-8152-226a5b53c16a" (UID: "f090a614-3703-461c-8152-226a5b53c16a"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.001295 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f090a614-3703-461c-8152-226a5b53c16a" (UID: "f090a614-3703-461c-8152-226a5b53c16a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.096512 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.096578 5002 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-lock\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.096608 5002 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.096626 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz5kk\" (UniqueName: \"kubernetes.io/projected/f090a614-3703-461c-8152-226a5b53c16a-kube-api-access-lz5kk\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.096647 5002 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f090a614-3703-461c-8152-226a5b53c16a-cache\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.121227 5002 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.198799 5002 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 
crc kubenswrapper[5002]: I1203 16:56:31.762696 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jkwrt_f5cc28df-be84-4c87-b0fc-a523c5a23395/ovs-vswitchd/0.log" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.764271 5002 generic.go:334] "Generic (PLEG): container finished" podID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" exitCode=137 Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.764332 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerDied","Data":"226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff"} Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.764382 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jkwrt" event={"ID":"f5cc28df-be84-4c87-b0fc-a523c5a23395","Type":"ContainerDied","Data":"8fb0fd59b2e0c33c594e3ce22470dc3a2f2b885cdcfc07dddb8879cca8b1fd1a"} Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.764399 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fb0fd59b2e0c33c594e3ce22470dc3a2f2b885cdcfc07dddb8879cca8b1fd1a" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.775555 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f090a614-3703-461c-8152-226a5b53c16a","Type":"ContainerDied","Data":"5bb1594e739fe799aa82a8f48f853a7ecd9e6068fb751f01bd8cbf1ec5dfb599"} Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.775612 5002 scope.go:117] "RemoveContainer" containerID="b8210908d5d09bcdac92e5295a766ab5588450583f98221265c5dc1fcef219e0" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.775674 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.811041 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jkwrt_f5cc28df-be84-4c87-b0fc-a523c5a23395/ovs-vswitchd/0.log" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.811895 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.817232 5002 scope.go:117] "RemoveContainer" containerID="51ef41e140012493084ca9a5ee4771bb67457963ca1eb5c801a48e1b0525b81d" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.837092 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.844222 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.847571 5002 scope.go:117] "RemoveContainer" containerID="2259d0bf57741cf43caa6dace1c5a1419cb7906850811728b72c40313b3bf897" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.873027 5002 scope.go:117] "RemoveContainer" containerID="14c3879759c2b66a2378417de3744de65dc49d534bafb30583646388375fa453" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.888966 5002 scope.go:117] "RemoveContainer" containerID="818f3df7318a7e5d0b01bd79d58ad702a7eb69ce025a94d68c50fb54ed6f4b7d" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.905498 5002 scope.go:117] "RemoveContainer" containerID="56b7bbd59ef17d3e48ad12ed59f364881c2e4bfa9e7ece40383f0ef190962e10" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.909832 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-lib\") pod \"f5cc28df-be84-4c87-b0fc-a523c5a23395\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.909942 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5cc28df-be84-4c87-b0fc-a523c5a23395-scripts\") pod \"f5cc28df-be84-4c87-b0fc-a523c5a23395\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.909987 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-log\") pod \"f5cc28df-be84-4c87-b0fc-a523c5a23395\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910021 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctrfl\" (UniqueName: \"kubernetes.io/projected/f5cc28df-be84-4c87-b0fc-a523c5a23395-kube-api-access-ctrfl\") pod \"f5cc28df-be84-4c87-b0fc-a523c5a23395\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.909941 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-lib" (OuterVolumeSpecName: "var-lib") pod "f5cc28df-be84-4c87-b0fc-a523c5a23395" (UID: "f5cc28df-be84-4c87-b0fc-a523c5a23395"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910075 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-log" (OuterVolumeSpecName: "var-log") pod "f5cc28df-be84-4c87-b0fc-a523c5a23395" (UID: "f5cc28df-be84-4c87-b0fc-a523c5a23395"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910117 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-etc-ovs\") pod \"f5cc28df-be84-4c87-b0fc-a523c5a23395\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910177 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-run\") pod \"f5cc28df-be84-4c87-b0fc-a523c5a23395\" (UID: \"f5cc28df-be84-4c87-b0fc-a523c5a23395\") " Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910355 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "f5cc28df-be84-4c87-b0fc-a523c5a23395" (UID: "f5cc28df-be84-4c87-b0fc-a523c5a23395"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910394 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-run" (OuterVolumeSpecName: "var-run") pod "f5cc28df-be84-4c87-b0fc-a523c5a23395" (UID: "f5cc28df-be84-4c87-b0fc-a523c5a23395"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910791 5002 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910809 5002 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-lib\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910821 5002 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.910832 5002 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f5cc28df-be84-4c87-b0fc-a523c5a23395-etc-ovs\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.912239 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5cc28df-be84-4c87-b0fc-a523c5a23395-scripts" (OuterVolumeSpecName: "scripts") pod "f5cc28df-be84-4c87-b0fc-a523c5a23395" (UID: "f5cc28df-be84-4c87-b0fc-a523c5a23395"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.915210 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5cc28df-be84-4c87-b0fc-a523c5a23395-kube-api-access-ctrfl" (OuterVolumeSpecName: "kube-api-access-ctrfl") pod "f5cc28df-be84-4c87-b0fc-a523c5a23395" (UID: "f5cc28df-be84-4c87-b0fc-a523c5a23395"). InnerVolumeSpecName "kube-api-access-ctrfl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.923632 5002 scope.go:117] "RemoveContainer" containerID="9d9bb1df438ed50da59ac696f69efde7c6a48d1828bcb3dd1620fd321d2b4d34" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.956701 5002 scope.go:117] "RemoveContainer" containerID="90c36b07d1cc9b89cddb1a2322982944e4e056074c328fde3c02146dc0e50229" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.973017 5002 scope.go:117] "RemoveContainer" containerID="74b9cad1cdf521c7bfb58575456d72fa698d2f033219c2bcb6eeb10f75b16c25" Dec 03 16:56:31 crc kubenswrapper[5002]: I1203 16:56:31.989852 5002 scope.go:117] "RemoveContainer" containerID="a95935c6703da840b357416ca976f8c112b4e33b4bab1975af78cf849c48467d" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.010686 5002 scope.go:117] "RemoveContainer" containerID="a869513d51677c3c369edfd440f7ae36fb809c1de1a7e02989a558f03d0af6af" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.012098 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5cc28df-be84-4c87-b0fc-a523c5a23395-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.012143 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctrfl\" (UniqueName: \"kubernetes.io/projected/f5cc28df-be84-4c87-b0fc-a523c5a23395-kube-api-access-ctrfl\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.032159 5002 scope.go:117] "RemoveContainer" containerID="4e9275333706b76f736d2575ea8bf99de9ae2e8e214e70763b642bd4b982174e" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.049550 5002 scope.go:117] "RemoveContainer" containerID="8a698534e20e33c772ad47cef7ba71e2699abcc80985a3e4b85e7e699d61e5bc" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.068397 5002 scope.go:117] "RemoveContainer" containerID="295d415111976ea10f436b97bb0e928bbba6fc843889cfdd2399f44adfc4cf57" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.088894 5002 scope.go:117] "RemoveContainer" containerID="fd82b64468e87bb52951423d662c96298a04577e50fb7dfae08cf95f6cb95f60" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.784631 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-jkwrt" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.815095 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-jkwrt"] Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.819939 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-jkwrt"] Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.849347 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f090a614-3703-461c-8152-226a5b53c16a" path="/var/lib/kubelet/pods/f090a614-3703-461c-8152-226a5b53c16a/volumes" Dec 03 16:56:32 crc kubenswrapper[5002]: I1203 16:56:32.851259 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" path="/var/lib/kubelet/pods/f5cc28df-be84-4c87-b0fc-a523c5a23395/volumes" Dec 03 16:56:35 crc kubenswrapper[5002]: I1203 16:56:35.307558 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.584036 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.682718 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts\") pod \"721437ce-17b0-434b-9604-197f795ed1d9\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.682794 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz5r2\" (UniqueName: \"kubernetes.io/projected/721437ce-17b0-434b-9604-197f795ed1d9-kube-api-access-wz5r2\") pod \"721437ce-17b0-434b-9604-197f795ed1d9\" (UID: \"721437ce-17b0-434b-9604-197f795ed1d9\") " Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.683672 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "721437ce-17b0-434b-9604-197f795ed1d9" (UID: "721437ce-17b0-434b-9604-197f795ed1d9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.693404 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/721437ce-17b0-434b-9604-197f795ed1d9-kube-api-access-wz5r2" (OuterVolumeSpecName: "kube-api-access-wz5r2") pod "721437ce-17b0-434b-9604-197f795ed1d9" (UID: "721437ce-17b0-434b-9604-197f795ed1d9"). InnerVolumeSpecName "kube-api-access-wz5r2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.784366 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/721437ce-17b0-434b-9604-197f795ed1d9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.784407 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz5r2\" (UniqueName: \"kubernetes.io/projected/721437ce-17b0-434b-9604-197f795ed1d9-kube-api-access-wz5r2\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.830679 5002 generic.go:334] "Generic (PLEG): container finished" podID="721437ce-17b0-434b-9604-197f795ed1d9" containerID="e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f" exitCode=137 Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.830729 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0be01-account-delete-r7zk7" event={"ID":"721437ce-17b0-434b-9604-197f795ed1d9","Type":"ContainerDied","Data":"e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f"} Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.830786 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0be01-account-delete-r7zk7" event={"ID":"721437ce-17b0-434b-9604-197f795ed1d9","Type":"ContainerDied","Data":"383af97d21335016a4ee42514ad7c02f5401fc06e2ef0ca38f199ba1e62faa46"} Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.830815 5002 scope.go:117] "RemoveContainer" containerID="e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.830833 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell0be01-account-delete-r7zk7" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.852523 5002 scope.go:117] "RemoveContainer" containerID="e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f" Dec 03 16:56:36 crc kubenswrapper[5002]: E1203 16:56:36.853255 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f\": container with ID starting with e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f not found: ID does not exist" containerID="e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.853311 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f"} err="failed to get container status \"e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f\": rpc error: code = NotFound desc = could not find container \"e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f\": container with ID starting with e48903acbf0ae099022ba94bde804558eabc77349eafe0ae1c0bf4c2fcce050f not found: ID does not exist" Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.876419 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0be01-account-delete-r7zk7"] Dec 03 16:56:36 crc kubenswrapper[5002]: I1203 16:56:36.883704 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell0be01-account-delete-r7zk7"] Dec 03 16:56:38 crc kubenswrapper[5002]: I1203 16:56:38.855191 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="721437ce-17b0-434b-9604-197f795ed1d9" path="/var/lib/kubelet/pods/721437ce-17b0-434b-9604-197f795ed1d9/volumes" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.714107 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zgvpw"] Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.716606 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.716789 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.716969 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="922c692b-3d5c-45df-862d-d4e08b06fe0b" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.717110 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="922c692b-3d5c-45df-862d-d4e08b06fe0b" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.717234 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerName="dnsmasq-dns" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.717346 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerName="dnsmasq-dns" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.717466 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 
16:57:15.717602 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.717721 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.717881 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.718025 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.718138 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.718272 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="setup-container" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.718394 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="setup-container" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.718540 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.718668 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.721105 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.721279 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-api" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.721413 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.721538 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-api" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.721666 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="probe" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.721809 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="probe" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.721950 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.722067 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.722193 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="mysql-bootstrap" Dec 03 16:57:15 crc 
kubenswrapper[5002]: I1203 16:57:15.722305 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="mysql-bootstrap" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.722436 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f59bead-66d7-4fcb-842f-e573fcadcf1f" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.722553 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f59bead-66d7-4fcb-842f-e573fcadcf1f" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.722674 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.722816 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.722953 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea5a03b-519f-4af4-873a-e5e7f9e8f769" containerName="kube-state-metrics" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.723070 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea5a03b-519f-4af4-873a-e5e7f9e8f769" containerName="kube-state-metrics" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.723197 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9922091-0d6f-44cf-9b98-8b97a811ea26" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.723309 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9922091-0d6f-44cf-9b98-8b97a811ea26" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.723435 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="rsync" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.723571 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="rsync" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.723697 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-central-agent" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.723852 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-central-agent" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.724032 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.724136 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.724250 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server-init" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.724356 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server-init" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.724464 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.724570 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.724691 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="sg-core" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.724847 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="sg-core" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.724970 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-updater" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.725078 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-updater" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.725209 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.725310 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.725429 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="852cfff4-0855-40ab-a82d-b560c37118bf" containerName="memcached" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.725544 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="852cfff4-0855-40ab-a82d-b560c37118bf" containerName="memcached" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.725663 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d0c2be-6525-4ec1-bcae-e240255c970a" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.725804 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d0c2be-6525-4ec1-bcae-e240255c970a" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.725932 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.726050 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.726166 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.726282 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.726394 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.726502 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-server" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.726615 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" containerName="nova-cell1-conductor-conductor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.726730 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" containerName="nova-cell1-conductor-conductor" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.726894 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.727012 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.727121 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.727235 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.727352 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="ovsdbserver-nb" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.727458 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="ovsdbserver-nb" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.727564 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="rabbitmq" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.727675 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="rabbitmq" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.727832 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="ovsdbserver-sb" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.727991 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="ovsdbserver-sb" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.728113 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.728217 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.728337 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="cinder-scheduler" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.728449 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="cinder-scheduler" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.728572 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.728709 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.728877 5002 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.728994 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.729118 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerName="nova-scheduler-scheduler" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.729233 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerName="nova-scheduler-scheduler" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.732505 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de753fc7-23ae-4680-9d4c-11e5632d749d" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.732688 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="de753fc7-23ae-4680-9d4c-11e5632d749d" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.732830 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerName="galera" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.732923 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerName="galera" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.733046 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.733125 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.733206 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13c5aef5-d2f5-4449-8cce-125cdf61d06b" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.733277 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="13c5aef5-d2f5-4449-8cce-125cdf61d06b" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.733354 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" containerName="nova-cell0-conductor-conductor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.733424 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" containerName="nova-cell0-conductor-conductor" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.733509 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.733595 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.733711 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc3c0d8b-823d-42bc-a114-766494075e59" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.733832 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc3c0d8b-823d-42bc-a114-766494075e59" containerName="mariadb-account-delete" 
Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.733917 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-expirer" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.733987 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-expirer" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.734063 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.734595 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.734681 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.734778 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.734864 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.734932 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.735007 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="721437ce-17b0-434b-9604-197f795ed1d9" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.735077 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="721437ce-17b0-434b-9604-197f795ed1d9" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.735151 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="galera" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.735238 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="galera" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.735348 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" containerName="keystone-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.735441 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" containerName="keystone-api" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.735540 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="proxy-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.735636 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="proxy-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.735721 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="swift-recon-cron" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.735886 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="swift-recon-cron" Dec 03 
16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.735974 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.736071 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-server" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.736165 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-reaper" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.736260 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-reaper" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.736359 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.736456 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.736569 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.736664 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.736786 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.736890 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.736990 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="ovn-northd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.737070 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="ovn-northd" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.737168 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerName="mysql-bootstrap" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.737265 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerName="mysql-bootstrap" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.737360 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerName="setup-container" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.737451 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerName="setup-container" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.737576 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.737668 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-api" Dec 03 
16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.737790 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f61f67-5ef5-41a9-8bea-3335115b78e9" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.737894 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f61f67-5ef5-41a9-8bea-3335115b78e9" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.737994 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-notification-agent" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.738078 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-notification-agent" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.738184 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerName="rabbitmq" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.738280 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerName="rabbitmq" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.738381 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-updater" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.738475 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-updater" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.738575 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.738672 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.738787 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.738871 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.738941 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-metadata" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.739013 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-metadata" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.739083 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerName="init" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.739161 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerName="init" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.739232 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.739313 5002 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener-log" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.739402 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.739489 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-server" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.739577 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.739643 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: E1203 16:57:15.739717 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.739876 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740218 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="ovsdbserver-sb" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740304 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740379 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="dff65c7c-f3cc-45d5-aff8-4b2f9482a3f0" containerName="keystone-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740460 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740554 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="swift-recon-cron" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740634 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eacefa0-a1f4-4181-ab8e-201efd0fc67e" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740733 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740846 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7a598f1-0f32-448c-b08f-b5b0e70f583d" containerName="rabbitmq" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.740935 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741009 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="60743dc4-9a30-4fd2-80c1-0c7427241e92" containerName="galera" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741084 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="probe" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 
16:57:15.741874 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovs-vswitchd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741910 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="05d0c2be-6525-4ec1-bcae-e240255c970a" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741934 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741950 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b132eed4-cb4d-4abc-b49a-55688686288d" containerName="galera" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741960 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bea5a03b-519f-4af4-873a-e5e7f9e8f769" containerName="kube-state-metrics" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741975 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-updater" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.741991 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-expirer" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742005 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ffc70c1-b9a1-4f12-83e2-e0d695fc24a7" containerName="nova-cell0-conductor-conductor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742028 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742043 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742055 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="382d6556-c45b-43dd-a4fa-16b3e91e0725" containerName="rabbitmq" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742070 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742091 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742110 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742132 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="721437ce-17b0-434b-9604-197f795ed1d9" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742147 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="86921476-d5b9-4fc0-86d1-aa82dd931e5f" containerName="nova-scheduler-scheduler" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742158 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="55e94451-ebc7-4a6c-9927-df89ae0fc3c2" containerName="barbican-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742173 5002 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742187 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742202 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="sg-core" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742216 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-reaper" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742234 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="922c692b-3d5c-45df-862d-d4e08b06fe0b" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742249 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-notification-agent" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742262 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="33f61f67-5ef5-41a9-8bea-3335115b78e9" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742274 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f59bead-66d7-4fcb-842f-e573fcadcf1f" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742288 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742302 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="13c5aef5-d2f5-4449-8cce-125cdf61d06b" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742315 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e876c11-14f1-4e51-90a1-e2cdddc08c87" containerName="ovn-controller" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742328 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742343 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-metadata" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742357 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-auditor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742371 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="40fc41cc-8c4d-4d6e-8aa7-32abe0dcee5f" containerName="barbican-keystone-listener" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742382 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="object-updater" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742391 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="ceilometer-central-agent" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742401 5002 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="29a68818-9346-4437-9527-aea9383c1a25" containerName="ovn-northd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742413 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="account-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742427 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742453 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="852cfff4-0855-40ab-a82d-b560c37118bf" containerName="memcached" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742469 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="36469a67-4d79-419f-9aaf-a1c128132287" containerName="cinder-scheduler" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742488 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742505 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc3c0d8b-823d-42bc-a114-766494075e59" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742519 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="rsync" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742534 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b576f304-fd7e-419e-937c-dafaf1c00970" containerName="nova-api-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742547 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="30820296-8679-481c-9466-014d473e51ee" containerName="proxy-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742561 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa99da8f-30ec-4100-bcb7-56d6c7a7e7e9" containerName="barbican-worker-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742596 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec3d3d6b-b1df-4d20-8fc7-ca518ea56929" containerName="glance-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742612 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e408ec9-5662-43a6-93fd-9fa7a60f98db" containerName="nova-metadata-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742625 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742639 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c099352-abbe-4c3a-9431-c854e5333420" containerName="placement-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742657 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5cc28df-be84-4c87-b0fc-a523c5a23395" containerName="ovsdb-server" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742668 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="openstack-network-exporter" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742684 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="de753fc7-23ae-4680-9d4c-11e5632d749d" containerName="mariadb-account-delete" Dec 03 16:57:15 crc 
kubenswrapper[5002]: I1203 16:57:15.742709 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f69c54d-bd52-413b-86b6-6b5c4ca765ba" containerName="ovsdbserver-nb" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742720 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a3af9fa-d550-4d97-8d54-b198f0ca6f31" containerName="nova-cell1-conductor-conductor" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742730 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f090a614-3703-461c-8152-226a5b53c16a" containerName="container-replicator" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742738 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d105ad9d-fbca-4a0c-b188-a88a363756c2" containerName="glance-log" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742785 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="965b05ab-f8e9-485e-9f15-2160a598d8c2" containerName="cinder-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742798 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e81d6c-fac9-4f5c-b69f-3b26720a0f5c" containerName="neutron-api" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742807 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9922091-0d6f-44cf-9b98-8b97a811ea26" containerName="mariadb-account-delete" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742817 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35ed8f9-f4d0-4987-9d3d-83b1b04d9b6a" containerName="proxy-httpd" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.742828 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="34a0fbfb-baac-41c7-8430-cb0e1720dfa5" containerName="dnsmasq-dns" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.746586 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgvpw"] Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.746781 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.872486 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-catalog-content\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.872924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-utilities\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.872953 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl8hz\" (UniqueName: \"kubernetes.io/projected/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-kube-api-access-gl8hz\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.974466 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-utilities\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.974514 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl8hz\" (UniqueName: \"kubernetes.io/projected/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-kube-api-access-gl8hz\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.974631 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-catalog-content\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.975181 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-utilities\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.975204 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-catalog-content\") pod \"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:15 crc kubenswrapper[5002]: I1203 16:57:15.994899 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl8hz\" (UniqueName: \"kubernetes.io/projected/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-kube-api-access-gl8hz\") pod 
\"certified-operators-zgvpw\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:16 crc kubenswrapper[5002]: I1203 16:57:16.082836 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:16 crc kubenswrapper[5002]: I1203 16:57:16.567832 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgvpw"] Dec 03 16:57:17 crc kubenswrapper[5002]: I1203 16:57:17.272165 5002 generic.go:334] "Generic (PLEG): container finished" podID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerID="f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454" exitCode=0 Dec 03 16:57:17 crc kubenswrapper[5002]: I1203 16:57:17.272212 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerDied","Data":"f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454"} Dec 03 16:57:17 crc kubenswrapper[5002]: I1203 16:57:17.272239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerStarted","Data":"67dc86a3260ba48e415ea2c84b2e49456a7dfd292cc6a806b4082841a1b94877"} Dec 03 16:57:18 crc kubenswrapper[5002]: I1203 16:57:18.282254 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerStarted","Data":"f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e"} Dec 03 16:57:19 crc kubenswrapper[5002]: I1203 16:57:19.291585 5002 generic.go:334] "Generic (PLEG): container finished" podID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerID="f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e" exitCode=0 Dec 03 16:57:19 crc kubenswrapper[5002]: I1203 16:57:19.291639 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerDied","Data":"f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e"} Dec 03 16:57:20 crc kubenswrapper[5002]: I1203 16:57:20.304049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerStarted","Data":"53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a"} Dec 03 16:57:20 crc kubenswrapper[5002]: I1203 16:57:20.328424 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zgvpw" podStartSLOduration=2.701140695 podStartE2EDuration="5.328405419s" podCreationTimestamp="2025-12-03 16:57:15 +0000 UTC" firstStartedPulling="2025-12-03 16:57:17.27387433 +0000 UTC m=+1560.687696238" lastFinishedPulling="2025-12-03 16:57:19.901139074 +0000 UTC m=+1563.314960962" observedRunningTime="2025-12-03 16:57:20.324135732 +0000 UTC m=+1563.737957690" watchObservedRunningTime="2025-12-03 16:57:20.328405419 +0000 UTC m=+1563.742227327" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.478839 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pqsn9"] Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.480844 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.491095 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pqsn9"] Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.576658 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f02b254-7cb6-486d-87e8-6de21f6f176c-utilities\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.576732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f02b254-7cb6-486d-87e8-6de21f6f176c-catalog-content\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.576961 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvvvg\" (UniqueName: \"kubernetes.io/projected/6f02b254-7cb6-486d-87e8-6de21f6f176c-kube-api-access-mvvvg\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.678907 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvvvg\" (UniqueName: \"kubernetes.io/projected/6f02b254-7cb6-486d-87e8-6de21f6f176c-kube-api-access-mvvvg\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.679019 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f02b254-7cb6-486d-87e8-6de21f6f176c-utilities\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.679063 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f02b254-7cb6-486d-87e8-6de21f6f176c-catalog-content\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.679672 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f02b254-7cb6-486d-87e8-6de21f6f176c-catalog-content\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.679693 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f02b254-7cb6-486d-87e8-6de21f6f176c-utilities\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.706922 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mvvvg\" (UniqueName: \"kubernetes.io/projected/6f02b254-7cb6-486d-87e8-6de21f6f176c-kube-api-access-mvvvg\") pod \"community-operators-pqsn9\" (UID: \"6f02b254-7cb6-486d-87e8-6de21f6f176c\") " pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:22 crc kubenswrapper[5002]: I1203 16:57:22.814602 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:23 crc kubenswrapper[5002]: I1203 16:57:23.104661 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pqsn9"] Dec 03 16:57:23 crc kubenswrapper[5002]: W1203 16:57:23.113261 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f02b254_7cb6_486d_87e8_6de21f6f176c.slice/crio-ebd0a3bc1555caf474375f94541ca37ab431914626b288411c6bce7916ccfac0 WatchSource:0}: Error finding container ebd0a3bc1555caf474375f94541ca37ab431914626b288411c6bce7916ccfac0: Status 404 returned error can't find the container with id ebd0a3bc1555caf474375f94541ca37ab431914626b288411c6bce7916ccfac0 Dec 03 16:57:23 crc kubenswrapper[5002]: I1203 16:57:23.346936 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqsn9" event={"ID":"6f02b254-7cb6-486d-87e8-6de21f6f176c","Type":"ContainerStarted","Data":"f127347fbe31dc7c12b511c51ceeed3b95e984dcbb8e7e62d56929e834ea3657"} Dec 03 16:57:23 crc kubenswrapper[5002]: I1203 16:57:23.346977 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqsn9" event={"ID":"6f02b254-7cb6-486d-87e8-6de21f6f176c","Type":"ContainerStarted","Data":"ebd0a3bc1555caf474375f94541ca37ab431914626b288411c6bce7916ccfac0"} Dec 03 16:57:24 crc kubenswrapper[5002]: I1203 16:57:24.357624 5002 generic.go:334] "Generic (PLEG): container finished" podID="6f02b254-7cb6-486d-87e8-6de21f6f176c" containerID="f127347fbe31dc7c12b511c51ceeed3b95e984dcbb8e7e62d56929e834ea3657" exitCode=0 Dec 03 16:57:24 crc kubenswrapper[5002]: I1203 16:57:24.357722 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqsn9" event={"ID":"6f02b254-7cb6-486d-87e8-6de21f6f176c","Type":"ContainerDied","Data":"f127347fbe31dc7c12b511c51ceeed3b95e984dcbb8e7e62d56929e834ea3657"} Dec 03 16:57:26 crc kubenswrapper[5002]: I1203 16:57:26.083084 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:26 crc kubenswrapper[5002]: I1203 16:57:26.087263 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:26 crc kubenswrapper[5002]: I1203 16:57:26.141883 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:26 crc kubenswrapper[5002]: I1203 16:57:26.428833 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:27 crc kubenswrapper[5002]: I1203 16:57:27.286112 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgvpw"] Dec 03 16:57:28 crc kubenswrapper[5002]: I1203 16:57:28.388977 5002 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-zgvpw" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="registry-server" containerID="cri-o://53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a" gracePeriod=2 Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.762970 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.779609 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl8hz\" (UniqueName: \"kubernetes.io/projected/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-kube-api-access-gl8hz\") pod \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.779739 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-utilities\") pod \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.779876 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-catalog-content\") pod \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\" (UID: \"9acd1f9a-2dc7-4941-82a4-444e0d9787e5\") " Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.782319 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-utilities" (OuterVolumeSpecName: "utilities") pod "9acd1f9a-2dc7-4941-82a4-444e0d9787e5" (UID: "9acd1f9a-2dc7-4941-82a4-444e0d9787e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.787868 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-kube-api-access-gl8hz" (OuterVolumeSpecName: "kube-api-access-gl8hz") pod "9acd1f9a-2dc7-4941-82a4-444e0d9787e5" (UID: "9acd1f9a-2dc7-4941-82a4-444e0d9787e5"). InnerVolumeSpecName "kube-api-access-gl8hz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.841677 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9acd1f9a-2dc7-4941-82a4-444e0d9787e5" (UID: "9acd1f9a-2dc7-4941-82a4-444e0d9787e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.881673 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.881703 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:29 crc kubenswrapper[5002]: I1203 16:57:29.881714 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl8hz\" (UniqueName: \"kubernetes.io/projected/9acd1f9a-2dc7-4941-82a4-444e0d9787e5-kube-api-access-gl8hz\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.410591 5002 generic.go:334] "Generic (PLEG): container finished" podID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerID="53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a" exitCode=0 Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.410648 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgvpw" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.410670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerDied","Data":"53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a"} Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.411064 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgvpw" event={"ID":"9acd1f9a-2dc7-4941-82a4-444e0d9787e5","Type":"ContainerDied","Data":"67dc86a3260ba48e415ea2c84b2e49456a7dfd292cc6a806b4082841a1b94877"} Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.411086 5002 scope.go:117] "RemoveContainer" containerID="53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.413340 5002 generic.go:334] "Generic (PLEG): container finished" podID="6f02b254-7cb6-486d-87e8-6de21f6f176c" containerID="012b0359ceb9095ba10f1bbfaab9db40c6e56ef5d3ff67e1ffa7c8ea80e32d65" exitCode=0 Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.413396 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqsn9" event={"ID":"6f02b254-7cb6-486d-87e8-6de21f6f176c","Type":"ContainerDied","Data":"012b0359ceb9095ba10f1bbfaab9db40c6e56ef5d3ff67e1ffa7c8ea80e32d65"} Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.431995 5002 scope.go:117] "RemoveContainer" containerID="f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.463730 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgvpw"] Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.469278 5002 scope.go:117] "RemoveContainer" containerID="f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.472194 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zgvpw"] Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.491030 5002 scope.go:117] "RemoveContainer" 
containerID="53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a" Dec 03 16:57:30 crc kubenswrapper[5002]: E1203 16:57:30.491389 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a\": container with ID starting with 53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a not found: ID does not exist" containerID="53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.491418 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a"} err="failed to get container status \"53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a\": rpc error: code = NotFound desc = could not find container \"53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a\": container with ID starting with 53a83917391769d2c6a8f5b73ecd72bc436e50f2b105dda452a461db46239c8a not found: ID does not exist" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.491439 5002 scope.go:117] "RemoveContainer" containerID="f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e" Dec 03 16:57:30 crc kubenswrapper[5002]: E1203 16:57:30.491818 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e\": container with ID starting with f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e not found: ID does not exist" containerID="f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.491885 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e"} err="failed to get container status \"f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e\": rpc error: code = NotFound desc = could not find container \"f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e\": container with ID starting with f303436db146a7d70537a0c688900ffd8d41e927553c0a71432fab8a99f83e3e not found: ID does not exist" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.491931 5002 scope.go:117] "RemoveContainer" containerID="f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454" Dec 03 16:57:30 crc kubenswrapper[5002]: E1203 16:57:30.492399 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454\": container with ID starting with f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454 not found: ID does not exist" containerID="f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.492429 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454"} err="failed to get container status \"f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454\": rpc error: code = NotFound desc = could not find container \"f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454\": container with ID starting with 
f51d7d986eb6d156646b67f2db7367e89cb92c1de07c458c73d7b5a99e11b454 not found: ID does not exist" Dec 03 16:57:30 crc kubenswrapper[5002]: I1203 16:57:30.851438 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" path="/var/lib/kubelet/pods/9acd1f9a-2dc7-4941-82a4-444e0d9787e5/volumes" Dec 03 16:57:31 crc kubenswrapper[5002]: I1203 16:57:31.426191 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqsn9" event={"ID":"6f02b254-7cb6-486d-87e8-6de21f6f176c","Type":"ContainerStarted","Data":"7c225b5fac46142bad70fe48eef9387b5772a87478afc995f4a18dd96c73cafd"} Dec 03 16:57:31 crc kubenswrapper[5002]: I1203 16:57:31.446985 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pqsn9" podStartSLOduration=1.7513588 podStartE2EDuration="9.446958721s" podCreationTimestamp="2025-12-03 16:57:22 +0000 UTC" firstStartedPulling="2025-12-03 16:57:23.348346921 +0000 UTC m=+1566.762168809" lastFinishedPulling="2025-12-03 16:57:31.043946842 +0000 UTC m=+1574.457768730" observedRunningTime="2025-12-03 16:57:31.443269091 +0000 UTC m=+1574.857091019" watchObservedRunningTime="2025-12-03 16:57:31.446958721 +0000 UTC m=+1574.860780649" Dec 03 16:57:32 crc kubenswrapper[5002]: I1203 16:57:32.825780 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:32 crc kubenswrapper[5002]: I1203 16:57:32.826118 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:33 crc kubenswrapper[5002]: I1203 16:57:33.999435 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-pqsn9" podUID="6f02b254-7cb6-486d-87e8-6de21f6f176c" containerName="registry-server" probeResult="failure" output=< Dec 03 16:57:33 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Dec 03 16:57:33 crc kubenswrapper[5002]: > Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.241485 5002 scope.go:117] "RemoveContainer" containerID="e6fe04e95facb561195160dc1144f3a8f2fe4ee8638cfabeb6dd821713c5e1fa" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.269243 5002 scope.go:117] "RemoveContainer" containerID="c1428bf30cb15b69700ca7aee245af99d1b61186e96d67ee9c68f2ca7bb320c7" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.327310 5002 scope.go:117] "RemoveContainer" containerID="e32836bab6a307b307c2e7ff2105f751a937d80481eef6d5b7b82ddfe55feee5" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.362727 5002 scope.go:117] "RemoveContainer" containerID="c82b727bf230901a00c848a679ec9867ca63d6aded4a708f898961e39f482557" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.397919 5002 scope.go:117] "RemoveContainer" containerID="698d9e5595f8e1839bbebfc094fe638d2eaedeb57d30e303003a2d5aef8e2b1e" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.425844 5002 scope.go:117] "RemoveContainer" containerID="03b89cb2436ac0c1009dc99d0feaff080ad885db9e3bda35aa9432496182be09" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.446771 5002 scope.go:117] "RemoveContainer" containerID="6da7b30f7c3a208596f000e6c7c17e6eb191359f5fb8ca48a30f22f6ee2a05c8" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.473177 5002 scope.go:117] "RemoveContainer" containerID="c2d24d81949b545d3ebc7b32c1c2b0e1fdeeedd331dbf8e044d0abff41ed4938" Dec 03 16:57:37 crc 
kubenswrapper[5002]: I1203 16:57:37.503465 5002 scope.go:117] "RemoveContainer" containerID="75263f4b67c20376ca0ea56d883165335fd8045a74dc621581aca247bdaa086c" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.521689 5002 scope.go:117] "RemoveContainer" containerID="90aabd49e1b960e251f35578f1d41446c037e761d8d73e0a913a8379ef640fd1" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.547099 5002 scope.go:117] "RemoveContainer" containerID="79a5eb86d5fe80c84e8ed1927b16af9e3b75324f11a9482bafaacf52ecc8aa98" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.567324 5002 scope.go:117] "RemoveContainer" containerID="b1585b78bd2cce3b724558c2b02007ceae28e9bf1620d79698ce26bf316b9690" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.590507 5002 scope.go:117] "RemoveContainer" containerID="0e6785c90ed2e6f4994bf2fbad1046ba48cc32cec76ca773e19797a995f30fbc" Dec 03 16:57:37 crc kubenswrapper[5002]: I1203 16:57:37.611957 5002 scope.go:117] "RemoveContainer" containerID="226a504aa244f14b7a6ca861038e85afe87ed5e79eaa96d1eaba5b7b497d94ff" Dec 03 16:57:42 crc kubenswrapper[5002]: I1203 16:57:42.864949 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:42 crc kubenswrapper[5002]: I1203 16:57:42.910714 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pqsn9" Dec 03 16:57:42 crc kubenswrapper[5002]: I1203 16:57:42.980322 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pqsn9"] Dec 03 16:57:43 crc kubenswrapper[5002]: I1203 16:57:43.107287 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6l48r"] Dec 03 16:57:43 crc kubenswrapper[5002]: I1203 16:57:43.107572 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6l48r" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="registry-server" containerID="cri-o://718625b2b8cd5a16d0d3cdfd83ada3f4f6fa7907a7191c343966de829fc3ca53" gracePeriod=2 Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.565847 5002 generic.go:334] "Generic (PLEG): container finished" podID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerID="718625b2b8cd5a16d0d3cdfd83ada3f4f6fa7907a7191c343966de829fc3ca53" exitCode=0 Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.566499 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l48r" event={"ID":"f1403248-15f1-4aa0-afba-bc2e29f01886","Type":"ContainerDied","Data":"718625b2b8cd5a16d0d3cdfd83ada3f4f6fa7907a7191c343966de829fc3ca53"} Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.776770 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.899900 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-utilities\") pod \"f1403248-15f1-4aa0-afba-bc2e29f01886\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.899974 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq889\" (UniqueName: \"kubernetes.io/projected/f1403248-15f1-4aa0-afba-bc2e29f01886-kube-api-access-rq889\") pod \"f1403248-15f1-4aa0-afba-bc2e29f01886\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.900027 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-catalog-content\") pod \"f1403248-15f1-4aa0-afba-bc2e29f01886\" (UID: \"f1403248-15f1-4aa0-afba-bc2e29f01886\") " Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.901529 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-utilities" (OuterVolumeSpecName: "utilities") pod "f1403248-15f1-4aa0-afba-bc2e29f01886" (UID: "f1403248-15f1-4aa0-afba-bc2e29f01886"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.907824 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1403248-15f1-4aa0-afba-bc2e29f01886-kube-api-access-rq889" (OuterVolumeSpecName: "kube-api-access-rq889") pod "f1403248-15f1-4aa0-afba-bc2e29f01886" (UID: "f1403248-15f1-4aa0-afba-bc2e29f01886"). InnerVolumeSpecName "kube-api-access-rq889". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:57:44 crc kubenswrapper[5002]: I1203 16:57:44.955889 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1403248-15f1-4aa0-afba-bc2e29f01886" (UID: "f1403248-15f1-4aa0-afba-bc2e29f01886"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.001375 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.001412 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq889\" (UniqueName: \"kubernetes.io/projected/f1403248-15f1-4aa0-afba-bc2e29f01886-kube-api-access-rq889\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.001425 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1403248-15f1-4aa0-afba-bc2e29f01886-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.574938 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6l48r" event={"ID":"f1403248-15f1-4aa0-afba-bc2e29f01886","Type":"ContainerDied","Data":"68559925817ad32e21f62ce727ff51f9b045a0b1f0d4504eae7a99905df96c1e"} Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.575002 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6l48r" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.575010 5002 scope.go:117] "RemoveContainer" containerID="718625b2b8cd5a16d0d3cdfd83ada3f4f6fa7907a7191c343966de829fc3ca53" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.595730 5002 scope.go:117] "RemoveContainer" containerID="1d120120334ca64b3558bdca57f8b734fbced968e168a42766d3703c4b36955c" Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.608458 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6l48r"] Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.615343 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6l48r"] Dec 03 16:57:45 crc kubenswrapper[5002]: I1203 16:57:45.634784 5002 scope.go:117] "RemoveContainer" containerID="b052a386315c19bb6da7a52e8a48f786d44e73e8618e2cb6746a56b22b855306" Dec 03 16:57:46 crc kubenswrapper[5002]: I1203 16:57:46.850669 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" path="/var/lib/kubelet/pods/f1403248-15f1-4aa0-afba-bc2e29f01886/volumes" Dec 03 16:58:20 crc kubenswrapper[5002]: I1203 16:58:20.917257 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:58:20 crc kubenswrapper[5002]: I1203 16:58:20.917809 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:58:37 crc kubenswrapper[5002]: I1203 16:58:37.785352 5002 scope.go:117] "RemoveContainer" containerID="79224a40a4f46d3fa2c397f5179d0ff885c37571aae8f52f0e396327b7ecab0c" Dec 03 16:58:37 crc kubenswrapper[5002]: I1203 16:58:37.834916 5002 scope.go:117] "RemoveContainer" 
containerID="3de4372fa6435c63ccfedbb7394ede42734a75e8fd51c6d1fc2423cf3914ea49" Dec 03 16:58:37 crc kubenswrapper[5002]: I1203 16:58:37.859566 5002 scope.go:117] "RemoveContainer" containerID="f8a184e524d64b9dc097913b2900799e364ba6067295af6ea66de4aa4aaa7284" Dec 03 16:58:37 crc kubenswrapper[5002]: I1203 16:58:37.898324 5002 scope.go:117] "RemoveContainer" containerID="5923d9aee115cda041cea2387cd5646dd110c943ef7d4fb8ab87b7b965477538" Dec 03 16:58:37 crc kubenswrapper[5002]: I1203 16:58:37.947317 5002 scope.go:117] "RemoveContainer" containerID="0157e700031fbc06f5beb9f7261df49128aab542f40b669d4aa44d1b96123f77" Dec 03 16:58:37 crc kubenswrapper[5002]: I1203 16:58:37.972012 5002 scope.go:117] "RemoveContainer" containerID="f2dec3814f976a46c85e2eba759eb9becb8d98ee7d15d78dde0b17aeaa7f7b6c" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.022332 5002 scope.go:117] "RemoveContainer" containerID="5e5785af5ff0258b90954215a1483b5a6f295554f2fbe146b89f4c987082a912" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.044933 5002 scope.go:117] "RemoveContainer" containerID="791d6ff26ce51c06773571d5e750ad0115fba29c9dd734aac17971ba7ba34775" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.063653 5002 scope.go:117] "RemoveContainer" containerID="7b6d2d9ab3498afb3255849aed275053ff6ce7a6636c6b898dbde191016b281f" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.086345 5002 scope.go:117] "RemoveContainer" containerID="3dbb19429b734d264cde2745df385c89624a3724f50dda228f0605c865d01059" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.109615 5002 scope.go:117] "RemoveContainer" containerID="db108b919c7c5b3761fdde4e5ce0a4cbaee1b3800fbe8adf9e1e75b30601967a" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.138185 5002 scope.go:117] "RemoveContainer" containerID="36d298104f43453d78692e9f0dc49ecd5f2e7e541d036393de895d80b2edf9d1" Dec 03 16:58:38 crc kubenswrapper[5002]: I1203 16:58:38.164779 5002 scope.go:117] "RemoveContainer" containerID="00852d0e087cce30527deda079a968547452d5d48aaf7f1b080324ccf4304463" Dec 03 16:58:50 crc kubenswrapper[5002]: I1203 16:58:50.918118 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:58:50 crc kubenswrapper[5002]: I1203 16:58:50.918815 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:59:20 crc kubenswrapper[5002]: I1203 16:59:20.916587 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:59:20 crc kubenswrapper[5002]: I1203 16:59:20.917258 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 
16:59:20 crc kubenswrapper[5002]: I1203 16:59:20.917350 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 16:59:20 crc kubenswrapper[5002]: I1203 16:59:20.918238 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:59:20 crc kubenswrapper[5002]: I1203 16:59:20.918294 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" gracePeriod=600 Dec 03 16:59:21 crc kubenswrapper[5002]: E1203 16:59:21.053722 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 16:59:21 crc kubenswrapper[5002]: I1203 16:59:21.484057 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" exitCode=0 Dec 03 16:59:21 crc kubenswrapper[5002]: I1203 16:59:21.484150 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e"} Dec 03 16:59:21 crc kubenswrapper[5002]: I1203 16:59:21.484253 5002 scope.go:117] "RemoveContainer" containerID="499c319fe6708e676b3e6316237c177a723be8309f300373241f6343d1f6ef57" Dec 03 16:59:21 crc kubenswrapper[5002]: I1203 16:59:21.486710 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 16:59:21 crc kubenswrapper[5002]: E1203 16:59:21.487354 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 16:59:36 crc kubenswrapper[5002]: I1203 16:59:36.844289 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 16:59:36 crc kubenswrapper[5002]: E1203 16:59:36.845072 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.415808 5002 scope.go:117] "RemoveContainer" containerID="7f3da2aabf2363fa02a6cad9db5326f4c4f7cfa23643d11c53c73be40df7d4c4" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.450086 5002 scope.go:117] "RemoveContainer" containerID="4135f456101b8d8bc1d6aa3e05e38d4e5f0009555d8fd482836038f10a45877a" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.484917 5002 scope.go:117] "RemoveContainer" containerID="4bc8f24bf14262c55e9f63c9738562230246183c67d42270335aee3f4e625213" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.510237 5002 scope.go:117] "RemoveContainer" containerID="b49eb05bae1106d0f92766371ad241b971af33c597ee16aa00aa4a7e5ac37b88" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.536721 5002 scope.go:117] "RemoveContainer" containerID="9d77ab2291660608c77e2f3623a3656df2f34d2c652d9964617af8f0a234719c" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.563950 5002 scope.go:117] "RemoveContainer" containerID="02d099c7b5a52a32907ff960d5be05cd37532b1c4db658f445b99342fec6b927" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.594231 5002 scope.go:117] "RemoveContainer" containerID="e81a6b5b2d06af54cddc4be90e4d4a13ac27bedc2032aaec6cf75ef0057328dd" Dec 03 16:59:38 crc kubenswrapper[5002]: I1203 16:59:38.618204 5002 scope.go:117] "RemoveContainer" containerID="baf09ec78f1fa62fb965a5e1d80a324a472bde92f7bf1614c38cde3288421bd3" Dec 03 16:59:51 crc kubenswrapper[5002]: I1203 16:59:51.840204 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 16:59:51 crc kubenswrapper[5002]: E1203 16:59:51.840958 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.162340 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl"] Dec 03 17:00:00 crc kubenswrapper[5002]: E1203 17:00:00.165461 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="registry-server" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.165497 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="registry-server" Dec 03 17:00:00 crc kubenswrapper[5002]: E1203 17:00:00.165527 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="extract-content" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.165536 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="extract-content" Dec 03 17:00:00 crc kubenswrapper[5002]: E1203 17:00:00.165553 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="extract-utilities" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.165567 5002 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="extract-utilities" Dec 03 17:00:00 crc kubenswrapper[5002]: E1203 17:00:00.165587 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="extract-utilities" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.165596 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="extract-utilities" Dec 03 17:00:00 crc kubenswrapper[5002]: E1203 17:00:00.165614 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="extract-content" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.165622 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="extract-content" Dec 03 17:00:00 crc kubenswrapper[5002]: E1203 17:00:00.165642 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="registry-server" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.165653 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="registry-server" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.166099 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9acd1f9a-2dc7-4941-82a4-444e0d9787e5" containerName="registry-server" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.166121 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1403248-15f1-4aa0-afba-bc2e29f01886" containerName="registry-server" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.167080 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.169912 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.170411 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.197677 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-config-volume\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.197735 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl"] Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.197768 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txzhz\" (UniqueName: \"kubernetes.io/projected/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-kube-api-access-txzhz\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.197817 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-secret-volume\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.298974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-config-volume\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.299056 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txzhz\" (UniqueName: \"kubernetes.io/projected/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-kube-api-access-txzhz\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.299099 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-secret-volume\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.300728 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-config-volume\") pod 
\"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.307289 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-secret-volume\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.319239 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txzhz\" (UniqueName: \"kubernetes.io/projected/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-kube-api-access-txzhz\") pod \"collect-profiles-29413020-5jzjl\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.509027 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:00 crc kubenswrapper[5002]: I1203 17:00:00.988337 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl"] Dec 03 17:00:01 crc kubenswrapper[5002]: W1203 17:00:00.998959 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cb54a7e_8f20_4ea7_8ecc_83ec589a2cd9.slice/crio-992de4f362adcc6e31ee4583320c4467cc37f7920e5bd73b7da8637ff09c39cf WatchSource:0}: Error finding container 992de4f362adcc6e31ee4583320c4467cc37f7920e5bd73b7da8637ff09c39cf: Status 404 returned error can't find the container with id 992de4f362adcc6e31ee4583320c4467cc37f7920e5bd73b7da8637ff09c39cf Dec 03 17:00:01 crc kubenswrapper[5002]: I1203 17:00:01.857428 5002 generic.go:334] "Generic (PLEG): container finished" podID="9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" containerID="f993cadd821fb3a410a133b7617f5f1ee7d7a3f9f06159ef576da405cac3d017" exitCode=0 Dec 03 17:00:01 crc kubenswrapper[5002]: I1203 17:00:01.857714 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" event={"ID":"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9","Type":"ContainerDied","Data":"f993cadd821fb3a410a133b7617f5f1ee7d7a3f9f06159ef576da405cac3d017"} Dec 03 17:00:01 crc kubenswrapper[5002]: I1203 17:00:01.857807 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" event={"ID":"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9","Type":"ContainerStarted","Data":"992de4f362adcc6e31ee4583320c4467cc37f7920e5bd73b7da8637ff09c39cf"} Dec 03 17:00:02 crc kubenswrapper[5002]: I1203 17:00:02.840710 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:00:02 crc kubenswrapper[5002]: E1203 17:00:02.841172 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" 
podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.131593 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.237723 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-secret-volume\") pod \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.237907 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txzhz\" (UniqueName: \"kubernetes.io/projected/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-kube-api-access-txzhz\") pod \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.238695 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-config-volume\") pod \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\" (UID: \"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9\") " Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.239250 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-config-volume" (OuterVolumeSpecName: "config-volume") pod "9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" (UID: "9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.243134 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" (UID: "9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.243795 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-kube-api-access-txzhz" (OuterVolumeSpecName: "kube-api-access-txzhz") pod "9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" (UID: "9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9"). InnerVolumeSpecName "kube-api-access-txzhz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.339927 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.339963 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.339973 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txzhz\" (UniqueName: \"kubernetes.io/projected/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9-kube-api-access-txzhz\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.873574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" event={"ID":"9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9","Type":"ContainerDied","Data":"992de4f362adcc6e31ee4583320c4467cc37f7920e5bd73b7da8637ff09c39cf"} Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.873621 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="992de4f362adcc6e31ee4583320c4467cc37f7920e5bd73b7da8637ff09c39cf" Dec 03 17:00:03 crc kubenswrapper[5002]: I1203 17:00:03.873664 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl" Dec 03 17:00:16 crc kubenswrapper[5002]: I1203 17:00:16.847212 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:00:16 crc kubenswrapper[5002]: E1203 17:00:16.848191 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:00:30 crc kubenswrapper[5002]: I1203 17:00:30.840598 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:00:30 crc kubenswrapper[5002]: E1203 17:00:30.841648 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.725337 5002 scope.go:117] "RemoveContainer" containerID="83bc12a265e0074e224d6cf33b500361e2aac9770f8cdf19b1148ec59fa48f6e" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.746948 5002 scope.go:117] "RemoveContainer" containerID="115c8c772f00cb96dcd1beb56316398ca922e717a486f1720cd17f7b222565ff" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.789488 5002 scope.go:117] "RemoveContainer" containerID="71eeef36c6f1dadc59d3a781bfcbfbd8460dc45c062387e637b722d598dd4705" Dec 03 17:00:38 crc 
kubenswrapper[5002]: I1203 17:00:38.810969 5002 scope.go:117] "RemoveContainer" containerID="f8619d4dd7755cc1678293835a839ff1fae1a4ef2ee2b2138b7794279ebcef54" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.845054 5002 scope.go:117] "RemoveContainer" containerID="7b40bf8ba185db9fe3f624f36e84fbc575c0cd6f08a6e739a0f0496bd507ae1f" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.870310 5002 scope.go:117] "RemoveContainer" containerID="494ae1cdce3c8bc209ceb1c212726454d0a091e15dcb7b7aecd3e12a2e90df3e" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.897670 5002 scope.go:117] "RemoveContainer" containerID="c5d0ede319ccf188efb27569b868b26ca5f84ea64bed7ce6823e0f0aa90d0477" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.925314 5002 scope.go:117] "RemoveContainer" containerID="be0b486777dc09b33af931a7bba8f0f3a49cbd01154dc708e1b7333dca4661eb" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.955618 5002 scope.go:117] "RemoveContainer" containerID="c572cd9c1af6f40e03b071df88c96ebaa895f94c1cd6af90c6351a04eb304599" Dec 03 17:00:38 crc kubenswrapper[5002]: I1203 17:00:38.989187 5002 scope.go:117] "RemoveContainer" containerID="fcdafeed97d1bf71baec2b1679ea4a83c44d81bf0870a234df0c8a45883f5c37" Dec 03 17:00:39 crc kubenswrapper[5002]: I1203 17:00:39.010421 5002 scope.go:117] "RemoveContainer" containerID="e155877081445eee5138996cc287338fe1fb77ea902023396c1de90698d90b16" Dec 03 17:00:39 crc kubenswrapper[5002]: I1203 17:00:39.035185 5002 scope.go:117] "RemoveContainer" containerID="36cff69dfad7c31954e2260b65d28d76d76ad21b2a3da4e43e99aaca2171482c" Dec 03 17:00:42 crc kubenswrapper[5002]: I1203 17:00:42.840306 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:00:42 crc kubenswrapper[5002]: E1203 17:00:42.840885 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:00:54 crc kubenswrapper[5002]: I1203 17:00:54.840854 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:00:54 crc kubenswrapper[5002]: E1203 17:00:54.841660 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:01:06 crc kubenswrapper[5002]: I1203 17:01:06.848682 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:01:06 crc kubenswrapper[5002]: E1203 17:01:06.850137 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:01:19 crc kubenswrapper[5002]: I1203 17:01:19.841114 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:01:19 crc kubenswrapper[5002]: E1203 17:01:19.842515 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:01:32 crc kubenswrapper[5002]: I1203 17:01:32.840931 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:01:32 crc kubenswrapper[5002]: E1203 17:01:32.841810 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:01:39 crc kubenswrapper[5002]: I1203 17:01:39.220964 5002 scope.go:117] "RemoveContainer" containerID="dfddf060fa4fddc35fda40088529fbf414441a192eea714876824df928864e7e" Dec 03 17:01:39 crc kubenswrapper[5002]: I1203 17:01:39.245007 5002 scope.go:117] "RemoveContainer" containerID="f170d2d5c5453f499381793538a1774fccdd3cc83fae41dd5c62f54865d69fea" Dec 03 17:01:39 crc kubenswrapper[5002]: I1203 17:01:39.267192 5002 scope.go:117] "RemoveContainer" containerID="b8df45427a97ecede50bedecdafce707afb7a6c3b99bed2b3b06ade3f707f8c0" Dec 03 17:01:39 crc kubenswrapper[5002]: I1203 17:01:39.281866 5002 scope.go:117] "RemoveContainer" containerID="739bab8b7d0739633fc83f4a91f4b03d89579795c296b2c49a39101f7359c721" Dec 03 17:01:39 crc kubenswrapper[5002]: I1203 17:01:39.297586 5002 scope.go:117] "RemoveContainer" containerID="3dea21c55187632d7d42ec51fafaac9c2d306b2b50b5e53b5794f26fbc124243" Dec 03 17:01:45 crc kubenswrapper[5002]: I1203 17:01:45.839992 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:01:45 crc kubenswrapper[5002]: E1203 17:01:45.840416 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:01:59 crc kubenswrapper[5002]: I1203 17:01:59.841467 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:01:59 crc kubenswrapper[5002]: E1203 17:01:59.842560 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:02:13 crc kubenswrapper[5002]: I1203 17:02:13.841038 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:02:13 crc kubenswrapper[5002]: E1203 17:02:13.841867 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:02:28 crc kubenswrapper[5002]: I1203 17:02:28.841125 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:02:28 crc kubenswrapper[5002]: E1203 17:02:28.842007 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:02:39 crc kubenswrapper[5002]: I1203 17:02:39.375354 5002 scope.go:117] "RemoveContainer" containerID="480ec2b533abe5221031de6e27a59835f1f69c022a454f99a2fbd94cdb9e4c76" Dec 03 17:02:39 crc kubenswrapper[5002]: I1203 17:02:39.411952 5002 scope.go:117] "RemoveContainer" containerID="9f8d3e7dd1232b8bed9118f1348182183eb70011fc8ae05a19a411172dade4b4" Dec 03 17:02:39 crc kubenswrapper[5002]: I1203 17:02:39.461491 5002 scope.go:117] "RemoveContainer" containerID="be947ce6345fc6664a5a4f6598b88db3c63f4191808ff50a2479b4e6f82fb2a5" Dec 03 17:02:39 crc kubenswrapper[5002]: I1203 17:02:39.491554 5002 scope.go:117] "RemoveContainer" containerID="b31ca3192535ab4be61b9eb6074bc739b997b6f06d2c9f4e0e1cf060deb5622c" Dec 03 17:02:39 crc kubenswrapper[5002]: I1203 17:02:39.522249 5002 scope.go:117] "RemoveContainer" containerID="73c4d49306ef8821fe226706408e794a8c85e94b5167824f65c0f4a8a0ad2be4" Dec 03 17:02:39 crc kubenswrapper[5002]: I1203 17:02:39.547104 5002 scope.go:117] "RemoveContainer" containerID="5a4ccb5005877315a1ce91ddaced6dd35310247cd441e54f0d01164a98a72004" Dec 03 17:02:43 crc kubenswrapper[5002]: I1203 17:02:43.841333 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:02:43 crc kubenswrapper[5002]: E1203 17:02:43.842551 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:02:57 crc kubenswrapper[5002]: I1203 17:02:57.839887 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:02:57 crc kubenswrapper[5002]: E1203 17:02:57.840586 
5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:03:11 crc kubenswrapper[5002]: I1203 17:03:11.841153 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:03:11 crc kubenswrapper[5002]: E1203 17:03:11.842056 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:03:26 crc kubenswrapper[5002]: I1203 17:03:26.868235 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:03:26 crc kubenswrapper[5002]: E1203 17:03:26.869131 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:03:39 crc kubenswrapper[5002]: I1203 17:03:39.841299 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:03:39 crc kubenswrapper[5002]: E1203 17:03:39.842855 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:03:50 crc kubenswrapper[5002]: I1203 17:03:50.841137 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:03:50 crc kubenswrapper[5002]: E1203 17:03:50.841970 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:04:05 crc kubenswrapper[5002]: I1203 17:04:05.840000 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:04:05 crc kubenswrapper[5002]: E1203 17:04:05.840837 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:04:19 crc kubenswrapper[5002]: I1203 17:04:19.840588 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:04:19 crc kubenswrapper[5002]: E1203 17:04:19.841938 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.226561 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-87t2t"] Dec 03 17:04:21 crc kubenswrapper[5002]: E1203 17:04:21.227066 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" containerName="collect-profiles" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.227087 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" containerName="collect-profiles" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.227301 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" containerName="collect-profiles" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.228795 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.236919 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-87t2t"] Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.361545 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7lb9\" (UniqueName: \"kubernetes.io/projected/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-kube-api-access-x7lb9\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.361616 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-catalog-content\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.361822 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-utilities\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.463398 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7lb9\" (UniqueName: \"kubernetes.io/projected/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-kube-api-access-x7lb9\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.463470 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-catalog-content\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.463519 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-utilities\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.463977 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-utilities\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.464303 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-catalog-content\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.485036 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x7lb9\" (UniqueName: \"kubernetes.io/projected/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-kube-api-access-x7lb9\") pod \"redhat-operators-87t2t\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.564891 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.819637 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-87t2t"] Dec 03 17:04:21 crc kubenswrapper[5002]: I1203 17:04:21.975649 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerStarted","Data":"ea9558b7c2d30b030f6fef83b40bcb14e7a8929d95d0030d4b23e419341ce07a"} Dec 03 17:04:22 crc kubenswrapper[5002]: I1203 17:04:22.986117 5002 generic.go:334] "Generic (PLEG): container finished" podID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerID="116bf7fb663193bcd3e549fb44563d4680d6e7745afc99115a8dacdda0ce3ae4" exitCode=0 Dec 03 17:04:22 crc kubenswrapper[5002]: I1203 17:04:22.986172 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerDied","Data":"116bf7fb663193bcd3e549fb44563d4680d6e7745afc99115a8dacdda0ce3ae4"} Dec 03 17:04:22 crc kubenswrapper[5002]: I1203 17:04:22.990307 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:04:25 crc kubenswrapper[5002]: I1203 17:04:25.004229 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerStarted","Data":"53c344e3cd3fcd4dd0105509afe47e440be7fdafa5ff84478bc53b0550a48c2e"} Dec 03 17:04:26 crc kubenswrapper[5002]: I1203 17:04:26.013989 5002 generic.go:334] "Generic (PLEG): container finished" podID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerID="53c344e3cd3fcd4dd0105509afe47e440be7fdafa5ff84478bc53b0550a48c2e" exitCode=0 Dec 03 17:04:26 crc kubenswrapper[5002]: I1203 17:04:26.014040 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerDied","Data":"53c344e3cd3fcd4dd0105509afe47e440be7fdafa5ff84478bc53b0550a48c2e"} Dec 03 17:04:28 crc kubenswrapper[5002]: I1203 17:04:28.028873 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerStarted","Data":"951f6500a9ab7c9778cd648efe41460d5b111e3d37ef6e9a2269892d3f1a6aea"} Dec 03 17:04:28 crc kubenswrapper[5002]: I1203 17:04:28.051965 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-87t2t" podStartSLOduration=2.937310886 podStartE2EDuration="7.051943068s" podCreationTimestamp="2025-12-03 17:04:21 +0000 UTC" firstStartedPulling="2025-12-03 17:04:22.989317845 +0000 UTC m=+1986.403139733" lastFinishedPulling="2025-12-03 17:04:27.103950027 +0000 UTC m=+1990.517771915" observedRunningTime="2025-12-03 17:04:28.045988624 +0000 UTC m=+1991.459810522" watchObservedRunningTime="2025-12-03 17:04:28.051943068 +0000 UTC m=+1991.465764956" Dec 03 17:04:31 crc 
kubenswrapper[5002]: I1203 17:04:31.566095 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:31 crc kubenswrapper[5002]: I1203 17:04:31.566398 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:32 crc kubenswrapper[5002]: I1203 17:04:32.619416 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-87t2t" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="registry-server" probeResult="failure" output=< Dec 03 17:04:32 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Dec 03 17:04:32 crc kubenswrapper[5002]: > Dec 03 17:04:33 crc kubenswrapper[5002]: I1203 17:04:33.841003 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e" Dec 03 17:04:41 crc kubenswrapper[5002]: I1203 17:04:41.645351 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:41 crc kubenswrapper[5002]: I1203 17:04:41.727183 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:41 crc kubenswrapper[5002]: I1203 17:04:41.884134 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-87t2t"] Dec 03 17:04:43 crc kubenswrapper[5002]: I1203 17:04:43.147317 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-87t2t" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="registry-server" containerID="cri-o://951f6500a9ab7c9778cd648efe41460d5b111e3d37ef6e9a2269892d3f1a6aea" gracePeriod=2 Dec 03 17:04:44 crc kubenswrapper[5002]: I1203 17:04:44.156148 5002 generic.go:334] "Generic (PLEG): container finished" podID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerID="951f6500a9ab7c9778cd648efe41460d5b111e3d37ef6e9a2269892d3f1a6aea" exitCode=0 Dec 03 17:04:44 crc kubenswrapper[5002]: I1203 17:04:44.156205 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerDied","Data":"951f6500a9ab7c9778cd648efe41460d5b111e3d37ef6e9a2269892d3f1a6aea"} Dec 03 17:04:44 crc kubenswrapper[5002]: I1203 17:04:44.158913 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"a0bfa093eae7ea75c7d379907ed03f32918cd5d417daedcc012cf138d236f7d8"} Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.232890 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.424158 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-catalog-content\") pod \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.424213 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-utilities\") pod \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.424252 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7lb9\" (UniqueName: \"kubernetes.io/projected/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-kube-api-access-x7lb9\") pod \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\" (UID: \"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6\") " Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.425873 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-utilities" (OuterVolumeSpecName: "utilities") pod "c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" (UID: "c86f2f89-b3f6-4f5b-ae11-43c656f40ad6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.429966 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-kube-api-access-x7lb9" (OuterVolumeSpecName: "kube-api-access-x7lb9") pod "c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" (UID: "c86f2f89-b3f6-4f5b-ae11-43c656f40ad6"). InnerVolumeSpecName "kube-api-access-x7lb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.525853 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.525925 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7lb9\" (UniqueName: \"kubernetes.io/projected/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-kube-api-access-x7lb9\") on node \"crc\" DevicePath \"\"" Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.547071 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" (UID: "c86f2f89-b3f6-4f5b-ae11-43c656f40ad6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:04:45 crc kubenswrapper[5002]: I1203 17:04:45.627401 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.177334 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-87t2t" event={"ID":"c86f2f89-b3f6-4f5b-ae11-43c656f40ad6","Type":"ContainerDied","Data":"ea9558b7c2d30b030f6fef83b40bcb14e7a8929d95d0030d4b23e419341ce07a"} Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.177675 5002 scope.go:117] "RemoveContainer" containerID="951f6500a9ab7c9778cd648efe41460d5b111e3d37ef6e9a2269892d3f1a6aea" Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.177417 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-87t2t" Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.208430 5002 scope.go:117] "RemoveContainer" containerID="53c344e3cd3fcd4dd0105509afe47e440be7fdafa5ff84478bc53b0550a48c2e" Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.211641 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-87t2t"] Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.221079 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-87t2t"] Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.236118 5002 scope.go:117] "RemoveContainer" containerID="116bf7fb663193bcd3e549fb44563d4680d6e7745afc99115a8dacdda0ce3ae4" Dec 03 17:04:46 crc kubenswrapper[5002]: I1203 17:04:46.853492 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" path="/var/lib/kubelet/pods/c86f2f89-b3f6-4f5b-ae11-43c656f40ad6/volumes" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.455297 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9v7wj"] Dec 03 17:05:37 crc kubenswrapper[5002]: E1203 17:05:37.456339 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="extract-content" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.456352 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="extract-content" Dec 03 17:05:37 crc kubenswrapper[5002]: E1203 17:05:37.456371 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="registry-server" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.456377 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="registry-server" Dec 03 17:05:37 crc kubenswrapper[5002]: E1203 17:05:37.456394 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="extract-utilities" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.456400 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="extract-utilities" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.456560 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c86f2f89-b3f6-4f5b-ae11-43c656f40ad6" containerName="registry-server" Dec 03 17:05:37 crc 
kubenswrapper[5002]: I1203 17:05:37.457694 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.463274 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9v7wj"] Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.557916 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-utilities\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.558044 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2pv5\" (UniqueName: \"kubernetes.io/projected/45851133-0cc9-4714-9dea-6b707508a5f6-kube-api-access-d2pv5\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.558180 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-catalog-content\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.659548 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-catalog-content\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.659636 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-utilities\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.659697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2pv5\" (UniqueName: \"kubernetes.io/projected/45851133-0cc9-4714-9dea-6b707508a5f6-kube-api-access-d2pv5\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.660180 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-catalog-content\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.660269 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-utilities\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc 
kubenswrapper[5002]: I1203 17:05:37.682835 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2pv5\" (UniqueName: \"kubernetes.io/projected/45851133-0cc9-4714-9dea-6b707508a5f6-kube-api-access-d2pv5\") pod \"redhat-marketplace-9v7wj\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:37 crc kubenswrapper[5002]: I1203 17:05:37.777844 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:38 crc kubenswrapper[5002]: I1203 17:05:38.217467 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9v7wj"] Dec 03 17:05:38 crc kubenswrapper[5002]: I1203 17:05:38.569548 5002 generic.go:334] "Generic (PLEG): container finished" podID="45851133-0cc9-4714-9dea-6b707508a5f6" containerID="0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049" exitCode=0 Dec 03 17:05:38 crc kubenswrapper[5002]: I1203 17:05:38.569867 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9v7wj" event={"ID":"45851133-0cc9-4714-9dea-6b707508a5f6","Type":"ContainerDied","Data":"0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049"} Dec 03 17:05:38 crc kubenswrapper[5002]: I1203 17:05:38.569899 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9v7wj" event={"ID":"45851133-0cc9-4714-9dea-6b707508a5f6","Type":"ContainerStarted","Data":"fad1984b46c13121dfa2aeade4f6788915914b04d86892fbc0cf206f58e5b1ae"} Dec 03 17:05:39 crc kubenswrapper[5002]: I1203 17:05:39.580608 5002 generic.go:334] "Generic (PLEG): container finished" podID="45851133-0cc9-4714-9dea-6b707508a5f6" containerID="73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986" exitCode=0 Dec 03 17:05:39 crc kubenswrapper[5002]: I1203 17:05:39.580661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9v7wj" event={"ID":"45851133-0cc9-4714-9dea-6b707508a5f6","Type":"ContainerDied","Data":"73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986"} Dec 03 17:05:40 crc kubenswrapper[5002]: I1203 17:05:40.591526 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9v7wj" event={"ID":"45851133-0cc9-4714-9dea-6b707508a5f6","Type":"ContainerStarted","Data":"f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04"} Dec 03 17:05:40 crc kubenswrapper[5002]: I1203 17:05:40.614490 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9v7wj" podStartSLOduration=2.17993392 podStartE2EDuration="3.614468791s" podCreationTimestamp="2025-12-03 17:05:37 +0000 UTC" firstStartedPulling="2025-12-03 17:05:38.572039523 +0000 UTC m=+2061.985861401" lastFinishedPulling="2025-12-03 17:05:40.006574384 +0000 UTC m=+2063.420396272" observedRunningTime="2025-12-03 17:05:40.60862569 +0000 UTC m=+2064.022447598" watchObservedRunningTime="2025-12-03 17:05:40.614468791 +0000 UTC m=+2064.028290679" Dec 03 17:05:47 crc kubenswrapper[5002]: I1203 17:05:47.778334 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:47 crc kubenswrapper[5002]: I1203 17:05:47.779088 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 
03 17:05:47 crc kubenswrapper[5002]: I1203 17:05:47.835579 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:48 crc kubenswrapper[5002]: I1203 17:05:48.693217 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:48 crc kubenswrapper[5002]: I1203 17:05:48.747392 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9v7wj"] Dec 03 17:05:50 crc kubenswrapper[5002]: I1203 17:05:50.689683 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9v7wj" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="registry-server" containerID="cri-o://f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04" gracePeriod=2 Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.612382 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.699054 5002 generic.go:334] "Generic (PLEG): container finished" podID="45851133-0cc9-4714-9dea-6b707508a5f6" containerID="f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04" exitCode=0 Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.699108 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9v7wj" event={"ID":"45851133-0cc9-4714-9dea-6b707508a5f6","Type":"ContainerDied","Data":"f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04"} Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.699122 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9v7wj" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.699162 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9v7wj" event={"ID":"45851133-0cc9-4714-9dea-6b707508a5f6","Type":"ContainerDied","Data":"fad1984b46c13121dfa2aeade4f6788915914b04d86892fbc0cf206f58e5b1ae"} Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.699199 5002 scope.go:117] "RemoveContainer" containerID="f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.736634 5002 scope.go:117] "RemoveContainer" containerID="73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.754704 5002 scope.go:117] "RemoveContainer" containerID="0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.765620 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-utilities\") pod \"45851133-0cc9-4714-9dea-6b707508a5f6\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.765707 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-catalog-content\") pod \"45851133-0cc9-4714-9dea-6b707508a5f6\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.765849 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2pv5\" (UniqueName: \"kubernetes.io/projected/45851133-0cc9-4714-9dea-6b707508a5f6-kube-api-access-d2pv5\") pod \"45851133-0cc9-4714-9dea-6b707508a5f6\" (UID: \"45851133-0cc9-4714-9dea-6b707508a5f6\") " Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.766775 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-utilities" (OuterVolumeSpecName: "utilities") pod "45851133-0cc9-4714-9dea-6b707508a5f6" (UID: "45851133-0cc9-4714-9dea-6b707508a5f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.778213 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45851133-0cc9-4714-9dea-6b707508a5f6-kube-api-access-d2pv5" (OuterVolumeSpecName: "kube-api-access-d2pv5") pod "45851133-0cc9-4714-9dea-6b707508a5f6" (UID: "45851133-0cc9-4714-9dea-6b707508a5f6"). InnerVolumeSpecName "kube-api-access-d2pv5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.779584 5002 scope.go:117] "RemoveContainer" containerID="f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04" Dec 03 17:05:51 crc kubenswrapper[5002]: E1203 17:05:51.780187 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04\": container with ID starting with f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04 not found: ID does not exist" containerID="f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.780233 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04"} err="failed to get container status \"f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04\": rpc error: code = NotFound desc = could not find container \"f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04\": container with ID starting with f5264b2ca268441e2410c80db7fa881e9dd327f04f0c8a1e414943a85539de04 not found: ID does not exist" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.780284 5002 scope.go:117] "RemoveContainer" containerID="73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986" Dec 03 17:05:51 crc kubenswrapper[5002]: E1203 17:05:51.780843 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986\": container with ID starting with 73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986 not found: ID does not exist" containerID="73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.780881 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986"} err="failed to get container status \"73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986\": rpc error: code = NotFound desc = could not find container \"73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986\": container with ID starting with 73eb650e6161828954ebaf42a2a72cd050139ff5ad0fb673e1e0d099eeda0986 not found: ID does not exist" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.780899 5002 scope.go:117] "RemoveContainer" containerID="0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049" Dec 03 17:05:51 crc kubenswrapper[5002]: E1203 17:05:51.781180 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049\": container with ID starting with 0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049 not found: ID does not exist" containerID="0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.781208 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049"} err="failed to get container status \"0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049\": rpc error: code = NotFound desc = could not 
find container \"0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049\": container with ID starting with 0b7a2af74abfe7563f7c10ec09e12308b6872dbfb68211144b3d59790804c049 not found: ID does not exist" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.787301 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "45851133-0cc9-4714-9dea-6b707508a5f6" (UID: "45851133-0cc9-4714-9dea-6b707508a5f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.867407 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.867447 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45851133-0cc9-4714-9dea-6b707508a5f6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:51 crc kubenswrapper[5002]: I1203 17:05:51.867466 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2pv5\" (UniqueName: \"kubernetes.io/projected/45851133-0cc9-4714-9dea-6b707508a5f6-kube-api-access-d2pv5\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:52 crc kubenswrapper[5002]: I1203 17:05:52.042659 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9v7wj"] Dec 03 17:05:52 crc kubenswrapper[5002]: I1203 17:05:52.050268 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9v7wj"] Dec 03 17:05:52 crc kubenswrapper[5002]: I1203 17:05:52.852544 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" path="/var/lib/kubelet/pods/45851133-0cc9-4714-9dea-6b707508a5f6/volumes" Dec 03 17:06:50 crc kubenswrapper[5002]: I1203 17:06:50.918835 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:06:50 crc kubenswrapper[5002]: I1203 17:06:50.919508 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:07:20 crc kubenswrapper[5002]: I1203 17:07:20.916576 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:07:20 crc kubenswrapper[5002]: I1203 17:07:20.918548 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.664900 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lp7rl"]
Dec 03 17:07:25 crc kubenswrapper[5002]: E1203 17:07:25.666906 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="extract-utilities"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.667078 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="extract-utilities"
Dec 03 17:07:25 crc kubenswrapper[5002]: E1203 17:07:25.667227 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="extract-content"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.667342 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="extract-content"
Dec 03 17:07:25 crc kubenswrapper[5002]: E1203 17:07:25.667478 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="registry-server"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.667591 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="registry-server"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.668059 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="45851133-0cc9-4714-9dea-6b707508a5f6" containerName="registry-server"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.670196 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.677599 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lp7rl"]
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.852526 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf9mw\" (UniqueName: \"kubernetes.io/projected/37ec8c8f-3152-45a4-bf33-3fe7d4017692-kube-api-access-rf9mw\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.852636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-utilities\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.852671 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-catalog-content\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.954359 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf9mw\" (UniqueName: \"kubernetes.io/projected/37ec8c8f-3152-45a4-bf33-3fe7d4017692-kube-api-access-rf9mw\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.954481 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-utilities\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.954539 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-catalog-content\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.955261 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-catalog-content\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:25 crc kubenswrapper[5002]: I1203 17:07:25.955470 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-utilities\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:26 crc kubenswrapper[5002]: I1203 17:07:26.003268 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf9mw\" (UniqueName: \"kubernetes.io/projected/37ec8c8f-3152-45a4-bf33-3fe7d4017692-kube-api-access-rf9mw\") pod \"certified-operators-lp7rl\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") " pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:26 crc kubenswrapper[5002]: I1203 17:07:26.291760 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:26 crc kubenswrapper[5002]: I1203 17:07:26.762362 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lp7rl"]
Dec 03 17:07:27 crc kubenswrapper[5002]: I1203 17:07:27.442435 5002 generic.go:334] "Generic (PLEG): container finished" podID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerID="7ac55ffb9460e07b8bbef7cf8457ede40f0b6328e09e2a8b31a7d059acab36a5" exitCode=0
Dec 03 17:07:27 crc kubenswrapper[5002]: I1203 17:07:27.442478 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp7rl" event={"ID":"37ec8c8f-3152-45a4-bf33-3fe7d4017692","Type":"ContainerDied","Data":"7ac55ffb9460e07b8bbef7cf8457ede40f0b6328e09e2a8b31a7d059acab36a5"}
Dec 03 17:07:27 crc kubenswrapper[5002]: I1203 17:07:27.442789 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp7rl" event={"ID":"37ec8c8f-3152-45a4-bf33-3fe7d4017692","Type":"ContainerStarted","Data":"a4408e42316c068f88ca20857f5469f16be94c87ffa71dde53f630b358b9cd45"}
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.058836 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hrj7n"]
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.060630 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.072817 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hrj7n"]
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.188853 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-utilities\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.189431 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p87xt\" (UniqueName: \"kubernetes.io/projected/23c5884c-c96a-435e-867d-b27a40c076fb-kube-api-access-p87xt\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.189486 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-catalog-content\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.290945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-catalog-content\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.291091 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-utilities\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.291132 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p87xt\" (UniqueName: \"kubernetes.io/projected/23c5884c-c96a-435e-867d-b27a40c076fb-kube-api-access-p87xt\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.291560 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-catalog-content\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.291587 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-utilities\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.314462 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p87xt\" (UniqueName: \"kubernetes.io/projected/23c5884c-c96a-435e-867d-b27a40c076fb-kube-api-access-p87xt\") pod \"community-operators-hrj7n\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") " pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.400142 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.453445 5002 generic.go:334] "Generic (PLEG): container finished" podID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerID="fef75d01639bbaa7af24969b7faa766eca677faad25da8d4f05b8a3c25d98041" exitCode=0
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.453490 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp7rl" event={"ID":"37ec8c8f-3152-45a4-bf33-3fe7d4017692","Type":"ContainerDied","Data":"fef75d01639bbaa7af24969b7faa766eca677faad25da8d4f05b8a3c25d98041"}
Dec 03 17:07:28 crc kubenswrapper[5002]: I1203 17:07:28.952238 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hrj7n"]
Dec 03 17:07:28 crc kubenswrapper[5002]: W1203 17:07:28.962586 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23c5884c_c96a_435e_867d_b27a40c076fb.slice/crio-3ce41a42adf80ed05d3ed2238a33d03932c77c2c39da513f4d105e935528affe WatchSource:0}: Error finding container 3ce41a42adf80ed05d3ed2238a33d03932c77c2c39da513f4d105e935528affe: Status 404 returned error can't find the container with id 3ce41a42adf80ed05d3ed2238a33d03932c77c2c39da513f4d105e935528affe
Dec 03 17:07:29 crc kubenswrapper[5002]: I1203 17:07:29.461369 5002 generic.go:334] "Generic (PLEG): container finished" podID="23c5884c-c96a-435e-867d-b27a40c076fb" containerID="abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1" exitCode=0
Dec 03 17:07:29 crc kubenswrapper[5002]: I1203 17:07:29.461428 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerDied","Data":"abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1"}
Dec 03 17:07:29 crc kubenswrapper[5002]: I1203 17:07:29.461840 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerStarted","Data":"3ce41a42adf80ed05d3ed2238a33d03932c77c2c39da513f4d105e935528affe"}
Dec 03 17:07:29 crc kubenswrapper[5002]: I1203 17:07:29.465490 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp7rl" event={"ID":"37ec8c8f-3152-45a4-bf33-3fe7d4017692","Type":"ContainerStarted","Data":"4b06eb7c936199c539ee713b4ee2e8efbc9783d924353baa18941f79a16a6a15"}
Dec 03 17:07:29 crc kubenswrapper[5002]: I1203 17:07:29.500168 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lp7rl" podStartSLOduration=2.9307391430000003 podStartE2EDuration="4.500147582s" podCreationTimestamp="2025-12-03 17:07:25 +0000 UTC" firstStartedPulling="2025-12-03 17:07:27.443998887 +0000 UTC m=+2170.857820775" lastFinishedPulling="2025-12-03 17:07:29.013407326 +0000 UTC m=+2172.427229214" observedRunningTime="2025-12-03 17:07:29.497410657 +0000 UTC m=+2172.911232565" watchObservedRunningTime="2025-12-03 17:07:29.500147582 +0000 UTC m=+2172.913969470"
Dec 03 17:07:30 crc kubenswrapper[5002]: I1203 17:07:30.474311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerStarted","Data":"6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055"}
Dec 03 17:07:31 crc kubenswrapper[5002]: I1203 17:07:31.484282 5002 generic.go:334] "Generic (PLEG): container finished" podID="23c5884c-c96a-435e-867d-b27a40c076fb" containerID="6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055" exitCode=0
Dec 03 17:07:31 crc kubenswrapper[5002]: I1203 17:07:31.484328 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerDied","Data":"6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055"}
Dec 03 17:07:32 crc kubenswrapper[5002]: I1203 17:07:32.493720 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerStarted","Data":"c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3"}
Dec 03 17:07:32 crc kubenswrapper[5002]: I1203 17:07:32.516199 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hrj7n" podStartSLOduration=1.920807805 podStartE2EDuration="4.516182383s" podCreationTimestamp="2025-12-03 17:07:28 +0000 UTC" firstStartedPulling="2025-12-03 17:07:29.463652781 +0000 UTC m=+2172.877474669" lastFinishedPulling="2025-12-03 17:07:32.059027359 +0000 UTC m=+2175.472849247" observedRunningTime="2025-12-03 17:07:32.515206726 +0000 UTC m=+2175.929028614" watchObservedRunningTime="2025-12-03 17:07:32.516182383 +0000 UTC m=+2175.930004271"
Dec 03 17:07:36 crc kubenswrapper[5002]: I1203 17:07:36.293089 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:36 crc kubenswrapper[5002]: I1203 17:07:36.293842 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:36 crc kubenswrapper[5002]: I1203 17:07:36.355446 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:36 crc kubenswrapper[5002]: I1203 17:07:36.586639 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:36 crc kubenswrapper[5002]: I1203 17:07:36.652740 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lp7rl"]
Dec 03 17:07:38 crc kubenswrapper[5002]: I1203 17:07:38.400273 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:38 crc kubenswrapper[5002]: I1203 17:07:38.401670 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:38 crc kubenswrapper[5002]: I1203 17:07:38.483603 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:38 crc kubenswrapper[5002]: I1203 17:07:38.535400 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lp7rl" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="registry-server" containerID="cri-o://4b06eb7c936199c539ee713b4ee2e8efbc9783d924353baa18941f79a16a6a15" gracePeriod=2
Dec 03 17:07:38 crc kubenswrapper[5002]: I1203 17:07:38.590708 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:38 crc kubenswrapper[5002]: I1203 17:07:38.989420 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrj7n"]
Dec 03 17:07:40 crc kubenswrapper[5002]: I1203 17:07:40.548631 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hrj7n" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="registry-server" containerID="cri-o://c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3" gracePeriod=2
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.109700 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.179154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-utilities\") pod \"23c5884c-c96a-435e-867d-b27a40c076fb\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") "
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.179270 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-catalog-content\") pod \"23c5884c-c96a-435e-867d-b27a40c076fb\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") "
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.179389 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p87xt\" (UniqueName: \"kubernetes.io/projected/23c5884c-c96a-435e-867d-b27a40c076fb-kube-api-access-p87xt\") pod \"23c5884c-c96a-435e-867d-b27a40c076fb\" (UID: \"23c5884c-c96a-435e-867d-b27a40c076fb\") "
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.181717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-utilities" (OuterVolumeSpecName: "utilities") pod "23c5884c-c96a-435e-867d-b27a40c076fb" (UID: "23c5884c-c96a-435e-867d-b27a40c076fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.186469 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23c5884c-c96a-435e-867d-b27a40c076fb-kube-api-access-p87xt" (OuterVolumeSpecName: "kube-api-access-p87xt") pod "23c5884c-c96a-435e-867d-b27a40c076fb" (UID: "23c5884c-c96a-435e-867d-b27a40c076fb"). InnerVolumeSpecName "kube-api-access-p87xt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.235997 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23c5884c-c96a-435e-867d-b27a40c076fb" (UID: "23c5884c-c96a-435e-867d-b27a40c076fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.281800 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p87xt\" (UniqueName: \"kubernetes.io/projected/23c5884c-c96a-435e-867d-b27a40c076fb-kube-api-access-p87xt\") on node \"crc\" DevicePath \"\""
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.281853 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.281864 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c5884c-c96a-435e-867d-b27a40c076fb-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.558899 5002 generic.go:334] "Generic (PLEG): container finished" podID="23c5884c-c96a-435e-867d-b27a40c076fb" containerID="c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3" exitCode=0
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.559006 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerDied","Data":"c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3"}
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.559016 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrj7n"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.559069 5002 scope.go:117] "RemoveContainer" containerID="c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.559050 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrj7n" event={"ID":"23c5884c-c96a-435e-867d-b27a40c076fb","Type":"ContainerDied","Data":"3ce41a42adf80ed05d3ed2238a33d03932c77c2c39da513f4d105e935528affe"}
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.563495 5002 generic.go:334] "Generic (PLEG): container finished" podID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerID="4b06eb7c936199c539ee713b4ee2e8efbc9783d924353baa18941f79a16a6a15" exitCode=0
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.563545 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp7rl" event={"ID":"37ec8c8f-3152-45a4-bf33-3fe7d4017692","Type":"ContainerDied","Data":"4b06eb7c936199c539ee713b4ee2e8efbc9783d924353baa18941f79a16a6a15"}
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.590907 5002 scope.go:117] "RemoveContainer" containerID="6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.598614 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrj7n"]
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.605522 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hrj7n"]
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.614384 5002 scope.go:117] "RemoveContainer" containerID="abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.633429 5002 scope.go:117] "RemoveContainer" containerID="c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3"
Dec 03 17:07:41 crc kubenswrapper[5002]: E1203 17:07:41.633973 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3\": container with ID starting with c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3 not found: ID does not exist" containerID="c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.634020 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3"} err="failed to get container status \"c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3\": rpc error: code = NotFound desc = could not find container \"c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3\": container with ID starting with c270dfbf85707dfe523ca787887db7a9d7234eb4bcdc6d1ffb2b51775b4a07a3 not found: ID does not exist"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.634050 5002 scope.go:117] "RemoveContainer" containerID="6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055"
Dec 03 17:07:41 crc kubenswrapper[5002]: E1203 17:07:41.634389 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055\": container with ID starting with 6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055 not found: ID does not exist" containerID="6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.634418 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055"} err="failed to get container status \"6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055\": rpc error: code = NotFound desc = could not find container \"6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055\": container with ID starting with 6c3347a80f3649abacd6d4025c9bc6dd654bc7d1308026bba0e4422cdf731055 not found: ID does not exist"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.634435 5002 scope.go:117] "RemoveContainer" containerID="abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1"
Dec 03 17:07:41 crc kubenswrapper[5002]: E1203 17:07:41.634699 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1\": container with ID starting with abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1 not found: ID does not exist" containerID="abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1"
Dec 03 17:07:41 crc kubenswrapper[5002]: I1203 17:07:41.634731 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1"} err="failed to get container status \"abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1\": rpc error: code = NotFound desc = could not find container \"abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1\": container with ID starting with abe4fc3ea10ec6c0f9a4f87412781fd0fc8bcb36e81252b0340e8c6417b3e3a1 not found: ID does not exist"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.022172 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.094620 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf9mw\" (UniqueName: \"kubernetes.io/projected/37ec8c8f-3152-45a4-bf33-3fe7d4017692-kube-api-access-rf9mw\") pod \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") "
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.094695 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-utilities\") pod \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") "
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.094823 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-catalog-content\") pod \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\" (UID: \"37ec8c8f-3152-45a4-bf33-3fe7d4017692\") "
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.095874 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-utilities" (OuterVolumeSpecName: "utilities") pod "37ec8c8f-3152-45a4-bf33-3fe7d4017692" (UID: "37ec8c8f-3152-45a4-bf33-3fe7d4017692"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.097909 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37ec8c8f-3152-45a4-bf33-3fe7d4017692-kube-api-access-rf9mw" (OuterVolumeSpecName: "kube-api-access-rf9mw") pod "37ec8c8f-3152-45a4-bf33-3fe7d4017692" (UID: "37ec8c8f-3152-45a4-bf33-3fe7d4017692"). InnerVolumeSpecName "kube-api-access-rf9mw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.159439 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37ec8c8f-3152-45a4-bf33-3fe7d4017692" (UID: "37ec8c8f-3152-45a4-bf33-3fe7d4017692"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.196811 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.196857 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf9mw\" (UniqueName: \"kubernetes.io/projected/37ec8c8f-3152-45a4-bf33-3fe7d4017692-kube-api-access-rf9mw\") on node \"crc\" DevicePath \"\""
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.196873 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ec8c8f-3152-45a4-bf33-3fe7d4017692-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.574638 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lp7rl"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.574691 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lp7rl" event={"ID":"37ec8c8f-3152-45a4-bf33-3fe7d4017692","Type":"ContainerDied","Data":"a4408e42316c068f88ca20857f5469f16be94c87ffa71dde53f630b358b9cd45"}
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.574785 5002 scope.go:117] "RemoveContainer" containerID="4b06eb7c936199c539ee713b4ee2e8efbc9783d924353baa18941f79a16a6a15"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.599875 5002 scope.go:117] "RemoveContainer" containerID="fef75d01639bbaa7af24969b7faa766eca677faad25da8d4f05b8a3c25d98041"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.625131 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lp7rl"]
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.629149 5002 scope.go:117] "RemoveContainer" containerID="7ac55ffb9460e07b8bbef7cf8457ede40f0b6328e09e2a8b31a7d059acab36a5"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.634284 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lp7rl"]
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.850371 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" path="/var/lib/kubelet/pods/23c5884c-c96a-435e-867d-b27a40c076fb/volumes"
Dec 03 17:07:42 crc kubenswrapper[5002]: I1203 17:07:42.851504 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" path="/var/lib/kubelet/pods/37ec8c8f-3152-45a4-bf33-3fe7d4017692/volumes"
Dec 03 17:07:50 crc kubenswrapper[5002]: I1203 17:07:50.917346 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:07:50 crc kubenswrapper[5002]: I1203 17:07:50.918086 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:07:50 crc kubenswrapper[5002]: I1203 17:07:50.918135 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f"
Dec 03 17:07:50 crc kubenswrapper[5002]: I1203 17:07:50.918707 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a0bfa093eae7ea75c7d379907ed03f32918cd5d417daedcc012cf138d236f7d8"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 17:07:50 crc kubenswrapper[5002]: I1203 17:07:50.918790 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://a0bfa093eae7ea75c7d379907ed03f32918cd5d417daedcc012cf138d236f7d8" gracePeriod=600
Dec 03 17:07:51 crc kubenswrapper[5002]: I1203 17:07:51.645106 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="a0bfa093eae7ea75c7d379907ed03f32918cd5d417daedcc012cf138d236f7d8" exitCode=0
Dec 03 17:07:51 crc kubenswrapper[5002]: I1203 17:07:51.645283 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"a0bfa093eae7ea75c7d379907ed03f32918cd5d417daedcc012cf138d236f7d8"}
Dec 03 17:07:51 crc kubenswrapper[5002]: I1203 17:07:51.645765 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"}
Dec 03 17:07:51 crc kubenswrapper[5002]: I1203 17:07:51.645793 5002 scope.go:117] "RemoveContainer" containerID="0d69905d7ff52ce8699bb9f8413e2e09832891862e1417e2c2d33fa37019251e"
Dec 03 17:10:20 crc kubenswrapper[5002]: I1203 17:10:20.916816 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:10:20 crc kubenswrapper[5002]: I1203 17:10:20.917823 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:10:50 crc kubenswrapper[5002]: I1203 17:10:50.916492 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:10:50 crc kubenswrapper[5002]: I1203 17:10:50.917403 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:11:20 crc kubenswrapper[5002]: I1203 17:11:20.916394 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:11:20 crc kubenswrapper[5002]: I1203 17:11:20.917095 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:11:20 crc kubenswrapper[5002]: I1203 17:11:20.917154 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f"
Dec 03 17:11:20 crc kubenswrapper[5002]: I1203 17:11:20.917953 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 17:11:20 crc kubenswrapper[5002]: I1203 17:11:20.918031 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2" gracePeriod=600
Dec 03 17:11:21 crc kubenswrapper[5002]: E1203 17:11:21.812184 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:11:21 crc kubenswrapper[5002]: I1203 17:11:21.834802 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2" exitCode=0
Dec 03 17:11:21 crc kubenswrapper[5002]: I1203 17:11:21.834877 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"}
Dec 03 17:11:21 crc kubenswrapper[5002]: I1203 17:11:21.834980 5002 scope.go:117] "RemoveContainer" containerID="a0bfa093eae7ea75c7d379907ed03f32918cd5d417daedcc012cf138d236f7d8"
Dec 03 17:11:21 crc kubenswrapper[5002]: I1203 17:11:21.835892 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:11:21 crc kubenswrapper[5002]: E1203 17:11:21.836325 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:11:36 crc kubenswrapper[5002]: I1203 17:11:36.845388 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:11:36 crc kubenswrapper[5002]: E1203 17:11:36.846618 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:11:51 crc kubenswrapper[5002]: I1203 17:11:51.840864 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:11:51 crc kubenswrapper[5002]: E1203 17:11:51.841614 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:12:02 crc kubenswrapper[5002]: I1203 17:12:02.841074 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:12:02 crc kubenswrapper[5002]: E1203 17:12:02.841787 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:12:13 crc kubenswrapper[5002]: I1203 17:12:13.841560 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:12:13 crc kubenswrapper[5002]: E1203 17:12:13.842785 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:12:26 crc kubenswrapper[5002]: I1203 17:12:26.844649 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:12:26 crc kubenswrapper[5002]: E1203 17:12:26.846012 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:12:37 crc kubenswrapper[5002]: I1203 17:12:37.840997 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:12:37 crc kubenswrapper[5002]: E1203 17:12:37.842117 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:12:49 crc kubenswrapper[5002]: I1203 17:12:49.840369 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:12:49 crc kubenswrapper[5002]: E1203 17:12:49.841866 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:13:04 crc kubenswrapper[5002]: I1203 17:13:04.841162 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:13:04 crc kubenswrapper[5002]: E1203 17:13:04.842073 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:13:15 crc kubenswrapper[5002]: I1203 17:13:15.841062 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:13:15 crc kubenswrapper[5002]: E1203 17:13:15.843569 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:13:29 crc kubenswrapper[5002]: I1203 17:13:29.841009 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:13:29 crc kubenswrapper[5002]: E1203 17:13:29.841877 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:13:41 crc kubenswrapper[5002]: I1203 17:13:41.840948 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:13:41 crc kubenswrapper[5002]: E1203 17:13:41.842117 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:13:56 crc kubenswrapper[5002]: I1203 17:13:56.849404 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:13:56 crc kubenswrapper[5002]: E1203 17:13:56.850479 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:14:11 crc kubenswrapper[5002]: I1203 17:14:11.840072 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:14:11 crc kubenswrapper[5002]: E1203 17:14:11.840909 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:14:26 crc kubenswrapper[5002]: I1203 17:14:26.844030 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:14:26 crc kubenswrapper[5002]: E1203 17:14:26.844692 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:14:38 crc kubenswrapper[5002]: I1203 17:14:38.840458 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:14:38 crc kubenswrapper[5002]: E1203 17:14:38.841204 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:14:49 crc kubenswrapper[5002]: I1203 17:14:49.840519 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:14:49 crc kubenswrapper[5002]: E1203 17:14:49.841669 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.142003 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"]
Dec 03 17:15:00 crc kubenswrapper[5002]: E1203 17:15:00.144039 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="extract-utilities"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.144125 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="extract-utilities"
Dec 03 17:15:00 crc kubenswrapper[5002]: E1203 17:15:00.144193 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="extract-content"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.144247 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="extract-content"
Dec 03 17:15:00 crc kubenswrapper[5002]: E1203 17:15:00.144312 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="extract-utilities"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.144368 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="extract-utilities"
Dec 03 17:15:00 crc kubenswrapper[5002]: E1203 17:15:00.144452 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="extract-content"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.144525 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="extract-content"
Dec 03 17:15:00 crc kubenswrapper[5002]: E1203 17:15:00.144601 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="registry-server"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.144670 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="registry-server"
Dec 03 17:15:00 crc kubenswrapper[5002]: E1203 17:15:00.144776 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="registry-server"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.144848 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="registry-server"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.145089 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="37ec8c8f-3152-45a4-bf33-3fe7d4017692" containerName="registry-server"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.145184 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="23c5884c-c96a-435e-867d-b27a40c076fb" containerName="registry-server"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.145855 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.148464 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.148622 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.162159 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"]
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.330691 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-secret-volume\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.330763 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwfjz\" (UniqueName: \"kubernetes.io/projected/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-kube-api-access-vwfjz\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.330801 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-config-volume\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.432417 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-secret-volume\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.432519 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwfjz\" (UniqueName: \"kubernetes.io/projected/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-kube-api-access-vwfjz\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.432554 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-config-volume\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.433961 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-config-volume\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.438627 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-secret-volume\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.459512 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwfjz\" (UniqueName: \"kubernetes.io/projected/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-kube-api-access-vwfjz\") pod \"collect-profiles-29413035-5lmtw\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.467468 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:00 crc kubenswrapper[5002]: I1203 17:15:00.952203 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"]
Dec 03 17:15:01 crc kubenswrapper[5002]: I1203 17:15:01.271870 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw" event={"ID":"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6","Type":"ContainerStarted","Data":"154a41ad0cbc51427ac65a9e61f147c65d0577f6d0a3ae81df45c7247d68a609"}
Dec 03 17:15:01 crc kubenswrapper[5002]: I1203 17:15:01.272205 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw" event={"ID":"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6","Type":"ContainerStarted","Data":"b9ccd5bb1ca07789728f36f61bb910627b1d8e1db071cc66dfba0f6520826679"}
Dec 03 17:15:01 crc kubenswrapper[5002]: I1203 17:15:01.292424 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw" podStartSLOduration=1.292408922 podStartE2EDuration="1.292408922s" podCreationTimestamp="2025-12-03 17:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:15:01.289495373 +0000 UTC m=+2624.703317251" watchObservedRunningTime="2025-12-03 17:15:01.292408922 +0000 UTC m=+2624.706230810"
Dec 03 17:15:02 crc kubenswrapper[5002]: I1203 17:15:02.279390 5002 generic.go:334] "Generic (PLEG): container finished" podID="9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" containerID="154a41ad0cbc51427ac65a9e61f147c65d0577f6d0a3ae81df45c7247d68a609" exitCode=0
Dec 03 17:15:02 crc kubenswrapper[5002]: I1203 17:15:02.279437 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw" event={"ID":"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6","Type":"ContainerDied","Data":"154a41ad0cbc51427ac65a9e61f147c65d0577f6d0a3ae81df45c7247d68a609"}
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.548987 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.673834 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-config-volume\") pod \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") "
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.673880 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-secret-volume\") pod \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") "
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.673928 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwfjz\" (UniqueName: \"kubernetes.io/projected/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-kube-api-access-vwfjz\") pod \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\" (UID: \"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6\") "
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.674517 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-config-volume" (OuterVolumeSpecName: "config-volume") pod "9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" (UID: "9e5e5b3d-426f-4d32-9012-b281bf5bf8b6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.675076 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-config-volume\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.678699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" (UID: "9e5e5b3d-426f-4d32-9012-b281bf5bf8b6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.678728 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-kube-api-access-vwfjz" (OuterVolumeSpecName: "kube-api-access-vwfjz") pod "9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" (UID: "9e5e5b3d-426f-4d32-9012-b281bf5bf8b6"). InnerVolumeSpecName "kube-api-access-vwfjz".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.775614 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.775672 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwfjz\" (UniqueName: \"kubernetes.io/projected/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6-kube-api-access-vwfjz\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:03 crc kubenswrapper[5002]: I1203 17:15:03.840270 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2" Dec 03 17:15:03 crc kubenswrapper[5002]: E1203 17:15:03.840631 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:15:04 crc kubenswrapper[5002]: I1203 17:15:04.303856 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw" event={"ID":"9e5e5b3d-426f-4d32-9012-b281bf5bf8b6","Type":"ContainerDied","Data":"b9ccd5bb1ca07789728f36f61bb910627b1d8e1db071cc66dfba0f6520826679"} Dec 03 17:15:04 crc kubenswrapper[5002]: I1203 17:15:04.304277 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9ccd5bb1ca07789728f36f61bb910627b1d8e1db071cc66dfba0f6520826679" Dec 03 17:15:04 crc kubenswrapper[5002]: I1203 17:15:04.304393 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"
Dec 03 17:15:04 crc kubenswrapper[5002]: I1203 17:15:04.614931 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp"]
Dec 03 17:15:04 crc kubenswrapper[5002]: I1203 17:15:04.621409 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-d57rp"]
Dec 03 17:15:04 crc kubenswrapper[5002]: I1203 17:15:04.848928 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4a92b26-42c8-4f22-bfba-6c63140c6501" path="/var/lib/kubelet/pods/f4a92b26-42c8-4f22-bfba-6c63140c6501/volumes"
Dec 03 17:15:14 crc kubenswrapper[5002]: I1203 17:15:14.840456 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:15:14 crc kubenswrapper[5002]: E1203 17:15:14.841279 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:15:26 crc kubenswrapper[5002]: I1203 17:15:26.843876 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:15:26 crc kubenswrapper[5002]: E1203 17:15:26.844688 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:15:38 crc kubenswrapper[5002]: I1203 17:15:38.841047 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:15:38 crc kubenswrapper[5002]: E1203 17:15:38.842504 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:15:39 crc kubenswrapper[5002]: I1203 17:15:39.931779 5002 scope.go:117] "RemoveContainer" containerID="8a381b9d3db8b02b2ce9ab7c166d60f557510bb96325d5894c8a5ede1ec5ae4c"
Dec 03 17:15:50 crc kubenswrapper[5002]: I1203 17:15:50.840147 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:15:50 crc kubenswrapper[5002]: E1203 17:15:50.841291 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:16:05 crc kubenswrapper[5002]: I1203 17:16:05.840431 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2" Dec 03 17:16:05 crc kubenswrapper[5002]: E1203 17:16:05.841283 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:16:17 crc kubenswrapper[5002]: I1203 17:16:17.841522 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2" Dec 03 17:16:17 crc kubenswrapper[5002]: E1203 17:16:17.842911 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:16:32 crc kubenswrapper[5002]: I1203 17:16:32.839917 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2" Dec 03 17:16:33 crc kubenswrapper[5002]: I1203 17:16:33.992808 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"4a513e2e0684fc729e62b69e04c7ac2fec634ec485b5be340e792484ab808e14"} Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.497175 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rp9jb"] Dec 03 17:16:53 crc kubenswrapper[5002]: E1203 17:16:53.498019 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" containerName="collect-profiles" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.498035 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" containerName="collect-profiles" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.498196 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" containerName="collect-profiles" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.499440 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.515732 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rp9jb"] Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.600802 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-utilities\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.600883 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-catalog-content\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.600968 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94jkj\" (UniqueName: \"kubernetes.io/projected/820b5f4f-b0fa-4ea8-844f-922fdd34d900-kube-api-access-94jkj\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.702002 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-utilities\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.702324 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-catalog-content\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.702452 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94jkj\" (UniqueName: \"kubernetes.io/projected/820b5f4f-b0fa-4ea8-844f-922fdd34d900-kube-api-access-94jkj\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.702690 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-utilities\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.703026 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-catalog-content\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.723908 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-94jkj\" (UniqueName: \"kubernetes.io/projected/820b5f4f-b0fa-4ea8-844f-922fdd34d900-kube-api-access-94jkj\") pod \"redhat-marketplace-rp9jb\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:53 crc kubenswrapper[5002]: I1203 17:16:53.819621 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:16:54 crc kubenswrapper[5002]: I1203 17:16:54.274373 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rp9jb"] Dec 03 17:16:54 crc kubenswrapper[5002]: W1203 17:16:54.280828 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod820b5f4f_b0fa_4ea8_844f_922fdd34d900.slice/crio-396b1e1ea20141d19a08e91846fc97dcf648ec33af36ad48ff7f9616bbf044d7 WatchSource:0}: Error finding container 396b1e1ea20141d19a08e91846fc97dcf648ec33af36ad48ff7f9616bbf044d7: Status 404 returned error can't find the container with id 396b1e1ea20141d19a08e91846fc97dcf648ec33af36ad48ff7f9616bbf044d7 Dec 03 17:16:55 crc kubenswrapper[5002]: I1203 17:16:55.163578 5002 generic.go:334] "Generic (PLEG): container finished" podID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerID="614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728" exitCode=0 Dec 03 17:16:55 crc kubenswrapper[5002]: I1203 17:16:55.163624 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rp9jb" event={"ID":"820b5f4f-b0fa-4ea8-844f-922fdd34d900","Type":"ContainerDied","Data":"614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728"} Dec 03 17:16:55 crc kubenswrapper[5002]: I1203 17:16:55.163662 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rp9jb" event={"ID":"820b5f4f-b0fa-4ea8-844f-922fdd34d900","Type":"ContainerStarted","Data":"396b1e1ea20141d19a08e91846fc97dcf648ec33af36ad48ff7f9616bbf044d7"} Dec 03 17:16:55 crc kubenswrapper[5002]: I1203 17:16:55.166136 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:16:57 crc kubenswrapper[5002]: I1203 17:16:57.177980 5002 generic.go:334] "Generic (PLEG): container finished" podID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerID="e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f" exitCode=0 Dec 03 17:16:57 crc kubenswrapper[5002]: I1203 17:16:57.178075 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rp9jb" event={"ID":"820b5f4f-b0fa-4ea8-844f-922fdd34d900","Type":"ContainerDied","Data":"e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f"} Dec 03 17:16:59 crc kubenswrapper[5002]: I1203 17:16:59.201943 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rp9jb" event={"ID":"820b5f4f-b0fa-4ea8-844f-922fdd34d900","Type":"ContainerStarted","Data":"9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72"} Dec 03 17:16:59 crc kubenswrapper[5002]: I1203 17:16:59.226957 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rp9jb" podStartSLOduration=3.019847845 podStartE2EDuration="6.22692393s" podCreationTimestamp="2025-12-03 17:16:53 +0000 UTC" firstStartedPulling="2025-12-03 17:16:55.165848328 +0000 UTC m=+2738.579670216" 
lastFinishedPulling="2025-12-03 17:16:58.372924413 +0000 UTC m=+2741.786746301" observedRunningTime="2025-12-03 17:16:59.225929053 +0000 UTC m=+2742.639750951" watchObservedRunningTime="2025-12-03 17:16:59.22692393 +0000 UTC m=+2742.640745818" Dec 03 17:17:03 crc kubenswrapper[5002]: I1203 17:17:03.819992 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:17:03 crc kubenswrapper[5002]: I1203 17:17:03.820592 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:17:03 crc kubenswrapper[5002]: I1203 17:17:03.867147 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:17:04 crc kubenswrapper[5002]: I1203 17:17:04.279499 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:17:04 crc kubenswrapper[5002]: I1203 17:17:04.328715 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rp9jb"] Dec 03 17:17:06 crc kubenswrapper[5002]: I1203 17:17:06.254463 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rp9jb" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="registry-server" containerID="cri-o://9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72" gracePeriod=2 Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.240664 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rp9jb" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.268015 5002 generic.go:334] "Generic (PLEG): container finished" podID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerID="9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72" exitCode=0 Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.268081 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rp9jb" event={"ID":"820b5f4f-b0fa-4ea8-844f-922fdd34d900","Type":"ContainerDied","Data":"9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72"} Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.268147 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rp9jb" event={"ID":"820b5f4f-b0fa-4ea8-844f-922fdd34d900","Type":"ContainerDied","Data":"396b1e1ea20141d19a08e91846fc97dcf648ec33af36ad48ff7f9616bbf044d7"} Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.268171 5002 scope.go:117] "RemoveContainer" containerID="9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.268325 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rp9jb"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.290779 5002 scope.go:117] "RemoveContainer" containerID="e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.315103 5002 scope.go:117] "RemoveContainer" containerID="614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.348902 5002 scope.go:117] "RemoveContainer" containerID="9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72"
Dec 03 17:17:07 crc kubenswrapper[5002]: E1203 17:17:07.349572 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72\": container with ID starting with 9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72 not found: ID does not exist" containerID="9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.349665 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72"} err="failed to get container status \"9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72\": rpc error: code = NotFound desc = could not find container \"9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72\": container with ID starting with 9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72 not found: ID does not exist"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.349727 5002 scope.go:117] "RemoveContainer" containerID="e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f"
Dec 03 17:17:07 crc kubenswrapper[5002]: E1203 17:17:07.350490 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f\": container with ID starting with e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f not found: ID does not exist" containerID="e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.350537 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f"} err="failed to get container status \"e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f\": rpc error: code = NotFound desc = could not find container \"e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f\": container with ID starting with e364821a4c15b8fe4a39eefaa80292be99bcd1651e8238f962ab2e8d62f14e9f not found: ID does not exist"
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.350568 5002 scope.go:117] "RemoveContainer" containerID="614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728"
Dec 03 17:17:07 crc kubenswrapper[5002]: E1203 17:17:07.351137 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728\": container with ID starting with 614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728 not found: ID does not exist" containerID="614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728"
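
[annotation] The paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" entries above are expected during this cleanup: the kubelet re-queries the runtime for container IDs it has just removed, and CRI-O answers with gRPC NotFound, which the kubelet logs and ignores. Below is a minimal sketch (not kubelet's actual code) of the same query over the public CRI gRPC API; the CRI-O socket path and the hard-coded container ID (copied from the log) are assumptions for illustration, and the module dependencies are google.golang.org/grpc and k8s.io/cri-api.

// cri_status_check.go: query a container's status over CRI and treat
// gRPC NotFound as "already removed", mirroring the sequence logged above.
package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/credentials/insecure"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	// Assumed CRI-O socket path; containerd would be /run/containerd/containerd.sock.
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	client := runtimeapi.NewRuntimeServiceClient(conn)
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()

	// Container ID taken from the RemoveContainer entries in the log.
	id := "9439b34bd43f87532db958fab743a392c20a08cbf0825eb42f5126292556ed72"
	_, err = client.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
	switch status.Code(err) {
	case codes.OK:
		fmt.Println("container still known to the runtime")
	case codes.NotFound:
		// Exactly the condition logged above: the container was already
		// deleted, so a follow-up status query legitimately fails.
		fmt.Println("container already gone:", err)
	default:
		fmt.Println("runtime error:", err)
	}
}
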
Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.351195 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728"} err="failed to get container status \"614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728\": rpc error: code = NotFound desc = could not find container \"614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728\": container with ID starting with 614ee83a3d7118c5de3a317306a0c9102a8c5213e047fb0ad4a3fa0811f4e728 not found: ID does not exist" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.416073 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-utilities\") pod \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.416228 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94jkj\" (UniqueName: \"kubernetes.io/projected/820b5f4f-b0fa-4ea8-844f-922fdd34d900-kube-api-access-94jkj\") pod \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.416310 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-catalog-content\") pod \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\" (UID: \"820b5f4f-b0fa-4ea8-844f-922fdd34d900\") " Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.417352 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-utilities" (OuterVolumeSpecName: "utilities") pod "820b5f4f-b0fa-4ea8-844f-922fdd34d900" (UID: "820b5f4f-b0fa-4ea8-844f-922fdd34d900"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.422884 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/820b5f4f-b0fa-4ea8-844f-922fdd34d900-kube-api-access-94jkj" (OuterVolumeSpecName: "kube-api-access-94jkj") pod "820b5f4f-b0fa-4ea8-844f-922fdd34d900" (UID: "820b5f4f-b0fa-4ea8-844f-922fdd34d900"). InnerVolumeSpecName "kube-api-access-94jkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.441238 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "820b5f4f-b0fa-4ea8-844f-922fdd34d900" (UID: "820b5f4f-b0fa-4ea8-844f-922fdd34d900"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.517917 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.517959 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/820b5f4f-b0fa-4ea8-844f-922fdd34d900-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.517970 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94jkj\" (UniqueName: \"kubernetes.io/projected/820b5f4f-b0fa-4ea8-844f-922fdd34d900-kube-api-access-94jkj\") on node \"crc\" DevicePath \"\"" Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.601064 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rp9jb"] Dec 03 17:17:07 crc kubenswrapper[5002]: I1203 17:17:07.607677 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rp9jb"] Dec 03 17:17:08 crc kubenswrapper[5002]: I1203 17:17:08.851845 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" path="/var/lib/kubelet/pods/820b5f4f-b0fa-4ea8-844f-922fdd34d900/volumes" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.382505 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dpw5p"] Dec 03 17:17:49 crc kubenswrapper[5002]: E1203 17:17:49.384000 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="extract-utilities" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.384018 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="extract-utilities" Dec 03 17:17:49 crc kubenswrapper[5002]: E1203 17:17:49.384032 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="registry-server" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.384042 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="registry-server" Dec 03 17:17:49 crc kubenswrapper[5002]: E1203 17:17:49.384070 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="extract-content" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.384078 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="extract-content" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.384278 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="820b5f4f-b0fa-4ea8-844f-922fdd34d900" containerName="registry-server" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.385549 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.402835 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpw5p"] Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.433532 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-utilities\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.433607 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-catalog-content\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.433835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shpdx\" (UniqueName: \"kubernetes.io/projected/bb1d6468-7bd7-4153-bd37-07352626f10e-kube-api-access-shpdx\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.535849 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-utilities\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.535917 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-catalog-content\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.535966 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shpdx\" (UniqueName: \"kubernetes.io/projected/bb1d6468-7bd7-4153-bd37-07352626f10e-kube-api-access-shpdx\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.536465 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-utilities\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.536486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-catalog-content\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.559321 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-shpdx\" (UniqueName: \"kubernetes.io/projected/bb1d6468-7bd7-4153-bd37-07352626f10e-kube-api-access-shpdx\") pod \"community-operators-dpw5p\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:49 crc kubenswrapper[5002]: I1203 17:17:49.713221 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:50 crc kubenswrapper[5002]: I1203 17:17:50.180303 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpw5p"] Dec 03 17:17:50 crc kubenswrapper[5002]: W1203 17:17:50.190776 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb1d6468_7bd7_4153_bd37_07352626f10e.slice/crio-630b628bd86ed0a5c94463a81ca886f5ed9236e53ae49101ed3930be6026de4c WatchSource:0}: Error finding container 630b628bd86ed0a5c94463a81ca886f5ed9236e53ae49101ed3930be6026de4c: Status 404 returned error can't find the container with id 630b628bd86ed0a5c94463a81ca886f5ed9236e53ae49101ed3930be6026de4c Dec 03 17:17:50 crc kubenswrapper[5002]: I1203 17:17:50.576135 5002 generic.go:334] "Generic (PLEG): container finished" podID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerID="c31db2245b620672f6ece1f6180191e2f6aba6ce504bdda251cea61e50d793c5" exitCode=0 Dec 03 17:17:50 crc kubenswrapper[5002]: I1203 17:17:50.576205 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpw5p" event={"ID":"bb1d6468-7bd7-4153-bd37-07352626f10e","Type":"ContainerDied","Data":"c31db2245b620672f6ece1f6180191e2f6aba6ce504bdda251cea61e50d793c5"} Dec 03 17:17:50 crc kubenswrapper[5002]: I1203 17:17:50.576722 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpw5p" event={"ID":"bb1d6468-7bd7-4153-bd37-07352626f10e","Type":"ContainerStarted","Data":"630b628bd86ed0a5c94463a81ca886f5ed9236e53ae49101ed3930be6026de4c"} Dec 03 17:17:52 crc kubenswrapper[5002]: I1203 17:17:52.593172 5002 generic.go:334] "Generic (PLEG): container finished" podID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerID="41035d461371c6d59e8d5460301e28a852035f64b06f1a21f1bcc956256f9995" exitCode=0 Dec 03 17:17:52 crc kubenswrapper[5002]: I1203 17:17:52.593289 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpw5p" event={"ID":"bb1d6468-7bd7-4153-bd37-07352626f10e","Type":"ContainerDied","Data":"41035d461371c6d59e8d5460301e28a852035f64b06f1a21f1bcc956256f9995"} Dec 03 17:17:53 crc kubenswrapper[5002]: I1203 17:17:53.603670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpw5p" event={"ID":"bb1d6468-7bd7-4153-bd37-07352626f10e","Type":"ContainerStarted","Data":"9c09499d22222e783c11a038f0d97e73d55e2dbae989a7b1d3b88a1639ec0708"} Dec 03 17:17:53 crc kubenswrapper[5002]: I1203 17:17:53.628105 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dpw5p" podStartSLOduration=1.981538461 podStartE2EDuration="4.628068829s" podCreationTimestamp="2025-12-03 17:17:49 +0000 UTC" firstStartedPulling="2025-12-03 17:17:50.580093146 +0000 UTC m=+2793.993915034" lastFinishedPulling="2025-12-03 17:17:53.226623514 +0000 UTC m=+2796.640445402" observedRunningTime="2025-12-03 17:17:53.623871145 +0000 UTC 
m=+2797.037693053" watchObservedRunningTime="2025-12-03 17:17:53.628068829 +0000 UTC m=+2797.041890727" Dec 03 17:17:59 crc kubenswrapper[5002]: I1203 17:17:59.713709 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:59 crc kubenswrapper[5002]: I1203 17:17:59.715251 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:17:59 crc kubenswrapper[5002]: I1203 17:17:59.760035 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:18:00 crc kubenswrapper[5002]: I1203 17:18:00.719170 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:18:00 crc kubenswrapper[5002]: I1203 17:18:00.766549 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpw5p"] Dec 03 17:18:02 crc kubenswrapper[5002]: I1203 17:18:02.686568 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dpw5p" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="registry-server" containerID="cri-o://9c09499d22222e783c11a038f0d97e73d55e2dbae989a7b1d3b88a1639ec0708" gracePeriod=2 Dec 03 17:18:04 crc kubenswrapper[5002]: I1203 17:18:04.703303 5002 generic.go:334] "Generic (PLEG): container finished" podID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerID="9c09499d22222e783c11a038f0d97e73d55e2dbae989a7b1d3b88a1639ec0708" exitCode=0 Dec 03 17:18:04 crc kubenswrapper[5002]: I1203 17:18:04.703348 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpw5p" event={"ID":"bb1d6468-7bd7-4153-bd37-07352626f10e","Type":"ContainerDied","Data":"9c09499d22222e783c11a038f0d97e73d55e2dbae989a7b1d3b88a1639ec0708"} Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.114483 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.174267 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-catalog-content\") pod \"bb1d6468-7bd7-4153-bd37-07352626f10e\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.174371 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-utilities\") pod \"bb1d6468-7bd7-4153-bd37-07352626f10e\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.174437 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shpdx\" (UniqueName: \"kubernetes.io/projected/bb1d6468-7bd7-4153-bd37-07352626f10e-kube-api-access-shpdx\") pod \"bb1d6468-7bd7-4153-bd37-07352626f10e\" (UID: \"bb1d6468-7bd7-4153-bd37-07352626f10e\") " Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.175320 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-utilities" (OuterVolumeSpecName: "utilities") pod "bb1d6468-7bd7-4153-bd37-07352626f10e" (UID: "bb1d6468-7bd7-4153-bd37-07352626f10e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.182149 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb1d6468-7bd7-4153-bd37-07352626f10e-kube-api-access-shpdx" (OuterVolumeSpecName: "kube-api-access-shpdx") pod "bb1d6468-7bd7-4153-bd37-07352626f10e" (UID: "bb1d6468-7bd7-4153-bd37-07352626f10e"). InnerVolumeSpecName "kube-api-access-shpdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.236875 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb1d6468-7bd7-4153-bd37-07352626f10e" (UID: "bb1d6468-7bd7-4153-bd37-07352626f10e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.276178 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.276215 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb1d6468-7bd7-4153-bd37-07352626f10e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.276228 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shpdx\" (UniqueName: \"kubernetes.io/projected/bb1d6468-7bd7-4153-bd37-07352626f10e-kube-api-access-shpdx\") on node \"crc\" DevicePath \"\"" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.715880 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpw5p" event={"ID":"bb1d6468-7bd7-4153-bd37-07352626f10e","Type":"ContainerDied","Data":"630b628bd86ed0a5c94463a81ca886f5ed9236e53ae49101ed3930be6026de4c"} Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.715944 5002 scope.go:117] "RemoveContainer" containerID="9c09499d22222e783c11a038f0d97e73d55e2dbae989a7b1d3b88a1639ec0708" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.716833 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpw5p" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.748350 5002 scope.go:117] "RemoveContainer" containerID="41035d461371c6d59e8d5460301e28a852035f64b06f1a21f1bcc956256f9995" Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.761060 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpw5p"] Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.768341 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dpw5p"] Dec 03 17:18:05 crc kubenswrapper[5002]: I1203 17:18:05.783695 5002 scope.go:117] "RemoveContainer" containerID="c31db2245b620672f6ece1f6180191e2f6aba6ce504bdda251cea61e50d793c5" Dec 03 17:18:06 crc kubenswrapper[5002]: I1203 17:18:06.849553 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" path="/var/lib/kubelet/pods/bb1d6468-7bd7-4153-bd37-07352626f10e/volumes" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.553907 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ck9nx"] Dec 03 17:18:13 crc kubenswrapper[5002]: E1203 17:18:13.555417 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="extract-content" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.555480 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="extract-content" Dec 03 17:18:13 crc kubenswrapper[5002]: E1203 17:18:13.555553 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="extract-utilities" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.555572 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="extract-utilities" Dec 03 17:18:13 crc kubenswrapper[5002]: E1203 17:18:13.555629 5002 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="registry-server" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.555645 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="registry-server" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.556200 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb1d6468-7bd7-4153-bd37-07352626f10e" containerName="registry-server" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.558853 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.575604 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ck9nx"] Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.597648 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-catalog-content\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.597826 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-utilities\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.597924 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4c64\" (UniqueName: \"kubernetes.io/projected/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-kube-api-access-l4c64\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.699118 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-utilities\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.699189 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4c64\" (UniqueName: \"kubernetes.io/projected/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-kube-api-access-l4c64\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.699250 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-catalog-content\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.699655 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-utilities\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.699698 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-catalog-content\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.720072 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4c64\" (UniqueName: \"kubernetes.io/projected/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-kube-api-access-l4c64\") pod \"certified-operators-ck9nx\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") " pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:13 crc kubenswrapper[5002]: I1203 17:18:13.889899 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:14 crc kubenswrapper[5002]: I1203 17:18:14.289881 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ck9nx"] Dec 03 17:18:14 crc kubenswrapper[5002]: I1203 17:18:14.790234 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerStarted","Data":"e9687eb1bc40eef17505d567c71f247ab0bedca093c8182a33243ef886a95a9f"} Dec 03 17:18:15 crc kubenswrapper[5002]: I1203 17:18:15.802118 5002 generic.go:334] "Generic (PLEG): container finished" podID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerID="c63fc5bb7f157da5e7823da0e6e8c17db579c32a29a794c8da8b321ad77c7601" exitCode=0 Dec 03 17:18:15 crc kubenswrapper[5002]: I1203 17:18:15.802197 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerDied","Data":"c63fc5bb7f157da5e7823da0e6e8c17db579c32a29a794c8da8b321ad77c7601"} Dec 03 17:18:16 crc kubenswrapper[5002]: I1203 17:18:16.816081 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerStarted","Data":"0d50f4b9399ec1e3955b0e6ce9f616ee6460bd1bc38f4e3062d97bcd6353453f"} Dec 03 17:18:17 crc kubenswrapper[5002]: I1203 17:18:17.829656 5002 generic.go:334] "Generic (PLEG): container finished" podID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerID="0d50f4b9399ec1e3955b0e6ce9f616ee6460bd1bc38f4e3062d97bcd6353453f" exitCode=0 Dec 03 17:18:17 crc kubenswrapper[5002]: I1203 17:18:17.829773 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerDied","Data":"0d50f4b9399ec1e3955b0e6ce9f616ee6460bd1bc38f4e3062d97bcd6353453f"} Dec 03 17:18:19 crc kubenswrapper[5002]: I1203 17:18:19.848685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerStarted","Data":"46ba7aac409138128fcc726eb1188acee3e8dcee14b4ecdb77eee07022995d0e"} Dec 03 17:18:19 crc kubenswrapper[5002]: I1203 
17:18:19.874656 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ck9nx" podStartSLOduration=3.6473356580000003 podStartE2EDuration="6.874530347s" podCreationTimestamp="2025-12-03 17:18:13 +0000 UTC" firstStartedPulling="2025-12-03 17:18:15.804305458 +0000 UTC m=+2819.218127356" lastFinishedPulling="2025-12-03 17:18:19.031500157 +0000 UTC m=+2822.445322045" observedRunningTime="2025-12-03 17:18:19.865623765 +0000 UTC m=+2823.279445663" watchObservedRunningTime="2025-12-03 17:18:19.874530347 +0000 UTC m=+2823.288352235" Dec 03 17:18:23 crc kubenswrapper[5002]: I1203 17:18:23.890241 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:23 crc kubenswrapper[5002]: I1203 17:18:23.890611 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:23 crc kubenswrapper[5002]: I1203 17:18:23.938637 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:24 crc kubenswrapper[5002]: I1203 17:18:24.926724 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ck9nx" Dec 03 17:18:24 crc kubenswrapper[5002]: I1203 17:18:24.995840 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ck9nx"] Dec 03 17:18:26 crc kubenswrapper[5002]: I1203 17:18:26.900399 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ck9nx" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="registry-server" containerID="cri-o://46ba7aac409138128fcc726eb1188acee3e8dcee14b4ecdb77eee07022995d0e" gracePeriod=2 Dec 03 17:18:27 crc kubenswrapper[5002]: I1203 17:18:27.908974 5002 generic.go:334] "Generic (PLEG): container finished" podID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerID="46ba7aac409138128fcc726eb1188acee3e8dcee14b4ecdb77eee07022995d0e" exitCode=0 Dec 03 17:18:27 crc kubenswrapper[5002]: I1203 17:18:27.909031 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerDied","Data":"46ba7aac409138128fcc726eb1188acee3e8dcee14b4ecdb77eee07022995d0e"} Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.420508 5002 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.546186 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-utilities\") pod \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") "
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.546262 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-catalog-content\") pod \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") "
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.546411 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4c64\" (UniqueName: \"kubernetes.io/projected/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-kube-api-access-l4c64\") pod \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\" (UID: \"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6\") "
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.546922 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-utilities" (OuterVolumeSpecName: "utilities") pod "cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" (UID: "cb2c96aa-8920-4b5f-bc78-20e3abd3eea6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.554009 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-kube-api-access-l4c64" (OuterVolumeSpecName: "kube-api-access-l4c64") pod "cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" (UID: "cb2c96aa-8920-4b5f-bc78-20e3abd3eea6"). InnerVolumeSpecName "kube-api-access-l4c64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.597034 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" (UID: "cb2c96aa-8920-4b5f-bc78-20e3abd3eea6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.648495 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4c64\" (UniqueName: \"kubernetes.io/projected/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-kube-api-access-l4c64\") on node \"crc\" DevicePath \"\""
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.648582 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.648598 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.917663 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck9nx" event={"ID":"cb2c96aa-8920-4b5f-bc78-20e3abd3eea6","Type":"ContainerDied","Data":"e9687eb1bc40eef17505d567c71f247ab0bedca093c8182a33243ef886a95a9f"}
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.917736 5002 scope.go:117] "RemoveContainer" containerID="46ba7aac409138128fcc726eb1188acee3e8dcee14b4ecdb77eee07022995d0e"
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.917803 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ck9nx"
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.941203 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ck9nx"]
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.944628 5002 scope.go:117] "RemoveContainer" containerID="0d50f4b9399ec1e3955b0e6ce9f616ee6460bd1bc38f4e3062d97bcd6353453f"
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.949303 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ck9nx"]
Dec 03 17:18:28 crc kubenswrapper[5002]: I1203 17:18:28.966328 5002 scope.go:117] "RemoveContainer" containerID="c63fc5bb7f157da5e7823da0e6e8c17db579c32a29a794c8da8b321ad77c7601"
Dec 03 17:18:30 crc kubenswrapper[5002]: I1203 17:18:30.853648 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" path="/var/lib/kubelet/pods/cb2c96aa-8920-4b5f-bc78-20e3abd3eea6/volumes"
Dec 03 17:18:50 crc kubenswrapper[5002]: I1203 17:18:50.917156 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:18:50 crc kubenswrapper[5002]: I1203 17:18:50.917692 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:19:20 crc kubenswrapper[5002]: I1203 17:19:20.916448 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:19:20 crc kubenswrapper[5002]: I1203 17:19:20.917132 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:19:50 crc kubenswrapper[5002]: I1203 17:19:50.917182 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:19:50 crc kubenswrapper[5002]: I1203 17:19:50.917804 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:19:50 crc kubenswrapper[5002]: I1203 17:19:50.919060 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f"
Dec 03 17:19:50 crc kubenswrapper[5002]: I1203 17:19:50.919724 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a513e2e0684fc729e62b69e04c7ac2fec634ec485b5be340e792484ab808e14"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 17:19:50 crc kubenswrapper[5002]: I1203 17:19:50.919806 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://4a513e2e0684fc729e62b69e04c7ac2fec634ec485b5be340e792484ab808e14" gracePeriod=600
Dec 03 17:19:51 crc kubenswrapper[5002]: I1203 17:19:51.511507 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="4a513e2e0684fc729e62b69e04c7ac2fec634ec485b5be340e792484ab808e14" exitCode=0
Dec 03 17:19:51 crc kubenswrapper[5002]: I1203 17:19:51.511563 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"4a513e2e0684fc729e62b69e04c7ac2fec634ec485b5be340e792484ab808e14"}
Dec 03 17:19:51 crc kubenswrapper[5002]: I1203 17:19:51.512183 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"}
Dec 03 17:19:51 crc kubenswrapper[5002]: I1203 17:19:51.512209 5002 scope.go:117] "RemoveContainer" containerID="d39ae8720ca92204b55fff5ec97b6257fb5d867a16851fe2bc6e200202ae9de2"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.434975 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x7xv6"]
Dec 03 17:21:51 crc kubenswrapper[5002]: E1203 17:21:51.435918 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="extract-utilities"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.435940 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="extract-utilities"
Dec 03 17:21:51 crc kubenswrapper[5002]: E1203 17:21:51.435966 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="registry-server"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.435977 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="registry-server"
Dec 03 17:21:51 crc kubenswrapper[5002]: E1203 17:21:51.436009 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="extract-content"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.436021 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="extract-content"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.436243 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb2c96aa-8920-4b5f-bc78-20e3abd3eea6" containerName="registry-server"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.437711 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.448294 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7xv6"]
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.562126 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-catalog-content\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.562207 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5gf8\" (UniqueName: \"kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.562241 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-utilities\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.664539 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-catalog-content\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.664645 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5gf8\" (UniqueName: \"kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6"
\"kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.664698 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-utilities\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.665211 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-utilities\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.665460 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-catalog-content\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.685029 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5gf8\" (UniqueName: \"kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8\") pod \"redhat-operators-x7xv6\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") " pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:21:51 crc kubenswrapper[5002]: I1203 17:21:51.779469 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:21:52 crc kubenswrapper[5002]: I1203 17:21:52.235531 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7xv6"] Dec 03 17:21:52 crc kubenswrapper[5002]: I1203 17:21:52.978950 5002 generic.go:334] "Generic (PLEG): container finished" podID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerID="394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13" exitCode=0 Dec 03 17:21:52 crc kubenswrapper[5002]: I1203 17:21:52.979111 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerDied","Data":"394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13"} Dec 03 17:21:52 crc kubenswrapper[5002]: I1203 17:21:52.979258 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerStarted","Data":"5399d38f716e1a251c22fb5193f646a4d1ed8f6dda0b90dc44d7fe04ce19f2b4"} Dec 03 17:21:53 crc kubenswrapper[5002]: I1203 17:21:53.987922 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerStarted","Data":"382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e"} Dec 03 17:21:54 crc kubenswrapper[5002]: I1203 17:21:54.997362 5002 generic.go:334] "Generic (PLEG): container finished" podID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerID="382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e" exitCode=0 Dec 03 17:21:54 crc kubenswrapper[5002]: I1203 17:21:54.997417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerDied","Data":"382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e"} Dec 03 17:21:56 crc kubenswrapper[5002]: I1203 17:21:56.007659 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerStarted","Data":"ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca"} Dec 03 17:21:56 crc kubenswrapper[5002]: I1203 17:21:56.024724 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x7xv6" podStartSLOduration=2.250999844 podStartE2EDuration="5.024703777s" podCreationTimestamp="2025-12-03 17:21:51 +0000 UTC" firstStartedPulling="2025-12-03 17:21:52.982069919 +0000 UTC m=+3036.395891817" lastFinishedPulling="2025-12-03 17:21:55.755773862 +0000 UTC m=+3039.169595750" observedRunningTime="2025-12-03 17:21:56.022466985 +0000 UTC m=+3039.436288883" watchObservedRunningTime="2025-12-03 17:21:56.024703777 +0000 UTC m=+3039.438525665" Dec 03 17:22:01 crc kubenswrapper[5002]: I1203 17:22:01.780332 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:22:01 crc kubenswrapper[5002]: I1203 17:22:01.780887 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x7xv6" Dec 03 17:22:01 crc kubenswrapper[5002]: I1203 17:22:01.831018 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x7xv6" Dec 
Dec 03 17:22:02 crc kubenswrapper[5002]: I1203 17:22:02.104090 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:22:02 crc kubenswrapper[5002]: I1203 17:22:02.157331 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7xv6"]
Dec 03 17:22:04 crc kubenswrapper[5002]: I1203 17:22:04.067066 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x7xv6" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="registry-server" containerID="cri-o://ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca" gracePeriod=2
Dec 03 17:22:04 crc kubenswrapper[5002]: I1203 17:22:04.881868 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.059216 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5gf8\" (UniqueName: \"kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8\") pod \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") "
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.059270 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-utilities\") pod \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") "
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.059310 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-catalog-content\") pod \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\" (UID: \"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc\") "
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.060593 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-utilities" (OuterVolumeSpecName: "utilities") pod "a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" (UID: "a7613fc4-6d4b-4e07-9edf-15d51b4b58fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.064793 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8" (OuterVolumeSpecName: "kube-api-access-b5gf8") pod "a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" (UID: "a7613fc4-6d4b-4e07-9edf-15d51b4b58fc"). InnerVolumeSpecName "kube-api-access-b5gf8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.077397 5002 generic.go:334] "Generic (PLEG): container finished" podID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerID="ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca" exitCode=0
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.077446 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerDied","Data":"ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca"}
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.077487 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7xv6" event={"ID":"a7613fc4-6d4b-4e07-9edf-15d51b4b58fc","Type":"ContainerDied","Data":"5399d38f716e1a251c22fb5193f646a4d1ed8f6dda0b90dc44d7fe04ce19f2b4"}
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.077482 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7xv6"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.077505 5002 scope.go:117] "RemoveContainer" containerID="ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.101443 5002 scope.go:117] "RemoveContainer" containerID="382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.124440 5002 scope.go:117] "RemoveContainer" containerID="394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.146166 5002 scope.go:117] "RemoveContainer" containerID="ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca"
Dec 03 17:22:05 crc kubenswrapper[5002]: E1203 17:22:05.146632 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca\": container with ID starting with ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca not found: ID does not exist" containerID="ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.146702 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca"} err="failed to get container status \"ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca\": rpc error: code = NotFound desc = could not find container \"ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca\": container with ID starting with ac0b6be3033a0e7e79517e3dde9c83179d937a659c5de9f024b7b6533c5031ca not found: ID does not exist"
Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.146773 5002 scope.go:117] "RemoveContainer" containerID="382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e"
Dec 03 17:22:05 crc kubenswrapper[5002]: E1203 17:22:05.147095 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e\": container with ID starting with 382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e not found: ID does not exist" containerID="382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e"
containerID="382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.147139 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e"} err="failed to get container status \"382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e\": rpc error: code = NotFound desc = could not find container \"382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e\": container with ID starting with 382d5fad9aac1659935cb956a1ce4be0f2ff07d5a39e4d220c6a76b81185578e not found: ID does not exist" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.147164 5002 scope.go:117] "RemoveContainer" containerID="394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13" Dec 03 17:22:05 crc kubenswrapper[5002]: E1203 17:22:05.147543 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13\": container with ID starting with 394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13 not found: ID does not exist" containerID="394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.147581 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13"} err="failed to get container status \"394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13\": rpc error: code = NotFound desc = could not find container \"394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13\": container with ID starting with 394634d3fb01809e76d839864defb917c2293e6231b42022debebc9b26008b13 not found: ID does not exist" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.160993 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.161041 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5gf8\" (UniqueName: \"kubernetes.io/projected/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-kube-api-access-b5gf8\") on node \"crc\" DevicePath \"\"" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.382786 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" (UID: "a7613fc4-6d4b-4e07-9edf-15d51b4b58fc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.465346 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.712399 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7xv6"] Dec 03 17:22:05 crc kubenswrapper[5002]: I1203 17:22:05.718377 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x7xv6"] Dec 03 17:22:06 crc kubenswrapper[5002]: I1203 17:22:06.851396 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" path="/var/lib/kubelet/pods/a7613fc4-6d4b-4e07-9edf-15d51b4b58fc/volumes" Dec 03 17:22:20 crc kubenswrapper[5002]: I1203 17:22:20.916599 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:22:20 crc kubenswrapper[5002]: I1203 17:22:20.917218 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:22:50 crc kubenswrapper[5002]: I1203 17:22:50.916831 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:22:50 crc kubenswrapper[5002]: I1203 17:22:50.917443 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:23:20 crc kubenswrapper[5002]: I1203 17:23:20.917083 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:23:20 crc kubenswrapper[5002]: I1203 17:23:20.918186 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:23:20 crc kubenswrapper[5002]: I1203 17:23:20.918409 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 17:23:20 crc kubenswrapper[5002]: I1203 17:23:20.919529 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
Dec 03 17:23:20 crc kubenswrapper[5002]: I1203 17:23:20.919600 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" gracePeriod=600
Dec 03 17:23:21 crc kubenswrapper[5002]: E1203 17:23:21.046964 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:23:21 crc kubenswrapper[5002]: I1203 17:23:21.638253 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" exitCode=0
Dec 03 17:23:21 crc kubenswrapper[5002]: I1203 17:23:21.638334 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"}
Dec 03 17:23:21 crc kubenswrapper[5002]: I1203 17:23:21.638611 5002 scope.go:117] "RemoveContainer" containerID="4a513e2e0684fc729e62b69e04c7ac2fec634ec485b5be340e792484ab808e14"
Dec 03 17:23:21 crc kubenswrapper[5002]: I1203 17:23:21.639517 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:23:21 crc kubenswrapper[5002]: E1203 17:23:21.641910 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:23:36 crc kubenswrapper[5002]: I1203 17:23:36.846773 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:23:36 crc kubenswrapper[5002]: E1203 17:23:36.848147 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:23:47 crc kubenswrapper[5002]: I1203 17:23:47.840480 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:23:47 crc kubenswrapper[5002]: E1203 17:23:47.841284 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:24:02 crc kubenswrapper[5002]: I1203 17:24:02.841602 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:24:02 crc kubenswrapper[5002]: E1203 17:24:02.842845 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:24:16 crc kubenswrapper[5002]: I1203 17:24:16.849547 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:24:16 crc kubenswrapper[5002]: E1203 17:24:16.850873 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:24:29 crc kubenswrapper[5002]: I1203 17:24:29.839700 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:24:29 crc kubenswrapper[5002]: E1203 17:24:29.840787 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:24:42 crc kubenswrapper[5002]: I1203 17:24:42.840974 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:24:42 crc kubenswrapper[5002]: E1203 17:24:42.843100 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:24:54 crc kubenswrapper[5002]: I1203 17:24:54.840890 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:24:54 crc kubenswrapper[5002]: E1203 17:24:54.842293 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:25:05 crc kubenswrapper[5002]: I1203 17:25:05.840373 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:25:05 crc kubenswrapper[5002]: E1203 17:25:05.841154 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:25:19 crc kubenswrapper[5002]: I1203 17:25:19.841191 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:25:19 crc kubenswrapper[5002]: E1203 17:25:19.842100 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:25:30 crc kubenswrapper[5002]: I1203 17:25:30.841457 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:25:30 crc kubenswrapper[5002]: E1203 17:25:30.842453 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:25:41 crc kubenswrapper[5002]: I1203 17:25:41.840133 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:25:41 crc kubenswrapper[5002]: E1203 17:25:41.840889 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:25:54 crc kubenswrapper[5002]: I1203 17:25:54.840861 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:25:54 crc kubenswrapper[5002]: E1203 17:25:54.841598 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:26:07 crc kubenswrapper[5002]: I1203 17:26:07.840841 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:26:07 crc kubenswrapper[5002]: E1203 17:26:07.841590 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:26:22 crc kubenswrapper[5002]: I1203 17:26:22.841327 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:26:22 crc kubenswrapper[5002]: E1203 17:26:22.842226 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:26:36 crc kubenswrapper[5002]: I1203 17:26:36.856501 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:26:36 crc kubenswrapper[5002]: E1203 17:26:36.857447 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:26:50 crc kubenswrapper[5002]: I1203 17:26:50.840452 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:26:50 crc kubenswrapper[5002]: E1203 17:26:50.841534 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.543524 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9kvkt"] Dec 03 17:26:55 crc kubenswrapper[5002]: E1203 17:26:55.544356 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="registry-server" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.544370 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="registry-server" Dec 03 17:26:55 crc kubenswrapper[5002]: E1203 17:26:55.544387 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="extract-content" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 
17:26:55.544393 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="extract-content" Dec 03 17:26:55 crc kubenswrapper[5002]: E1203 17:26:55.544409 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="extract-utilities" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.544415 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="extract-utilities" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.544549 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7613fc4-6d4b-4e07-9edf-15d51b4b58fc" containerName="registry-server" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.545617 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.565231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kvkt"] Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.720363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-utilities\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.720460 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-catalog-content\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.720603 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zhjp\" (UniqueName: \"kubernetes.io/projected/1aa53982-7936-4367-ad77-6a9570243faf-kube-api-access-6zhjp\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.821992 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zhjp\" (UniqueName: \"kubernetes.io/projected/1aa53982-7936-4367-ad77-6a9570243faf-kube-api-access-6zhjp\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.822086 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-utilities\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.822117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-catalog-content\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 
Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.822516 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-catalog-content\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.822578 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-utilities\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.845814 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zhjp\" (UniqueName: \"kubernetes.io/projected/1aa53982-7936-4367-ad77-6a9570243faf-kube-api-access-6zhjp\") pod \"redhat-marketplace-9kvkt\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:26:55 crc kubenswrapper[5002]: I1203 17:26:55.925012 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:26:56 crc kubenswrapper[5002]: I1203 17:26:56.366696 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kvkt"]
Dec 03 17:26:56 crc kubenswrapper[5002]: W1203 17:26:56.377145 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1aa53982_7936_4367_ad77_6a9570243faf.slice/crio-dce9bfd2c9c9955902f314a03e18482aef48b47c757e013617ad72a2aac30b1d WatchSource:0}: Error finding container dce9bfd2c9c9955902f314a03e18482aef48b47c757e013617ad72a2aac30b1d: Status 404 returned error can't find the container with id dce9bfd2c9c9955902f314a03e18482aef48b47c757e013617ad72a2aac30b1d
Dec 03 17:26:56 crc kubenswrapper[5002]: I1203 17:26:56.508312 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kvkt" event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerStarted","Data":"dce9bfd2c9c9955902f314a03e18482aef48b47c757e013617ad72a2aac30b1d"}
Dec 03 17:26:57 crc kubenswrapper[5002]: I1203 17:26:57.517346 5002 generic.go:334] "Generic (PLEG): container finished" podID="1aa53982-7936-4367-ad77-6a9570243faf" containerID="ac1bade073e1a09bcc6a9ab2b12ba1b2696faa466b7781efa7a6cf3d3c86b516" exitCode=0
Dec 03 17:26:57 crc kubenswrapper[5002]: I1203 17:26:57.517414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kvkt" event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerDied","Data":"ac1bade073e1a09bcc6a9ab2b12ba1b2696faa466b7781efa7a6cf3d3c86b516"}
Dec 03 17:26:57 crc kubenswrapper[5002]: I1203 17:26:57.520190 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 17:26:58 crc kubenswrapper[5002]: I1203 17:26:58.527033 5002 generic.go:334] "Generic (PLEG): container finished" podID="1aa53982-7936-4367-ad77-6a9570243faf" containerID="c3ea0bc4ee5d07ea56989f1b350c59bf5e8b58957b49e11522ec08d880b8e4f4" exitCode=0
Dec 03 17:26:58 crc kubenswrapper[5002]: I1203 17:26:58.527233 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kvkt" event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerDied","Data":"c3ea0bc4ee5d07ea56989f1b350c59bf5e8b58957b49e11522ec08d880b8e4f4"}
Dec 03 17:26:59 crc kubenswrapper[5002]: I1203 17:26:59.538377 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kvkt" event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerStarted","Data":"2df4ab202af64416f6a4bb1b63ef19411a135b2e189c8a9defa243e6ae0e4d5e"}
Dec 03 17:26:59 crc kubenswrapper[5002]: I1203 17:26:59.570219 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9kvkt" podStartSLOduration=3.166863639 podStartE2EDuration="4.570196246s" podCreationTimestamp="2025-12-03 17:26:55 +0000 UTC" firstStartedPulling="2025-12-03 17:26:57.519966247 +0000 UTC m=+3340.933788135" lastFinishedPulling="2025-12-03 17:26:58.923298824 +0000 UTC m=+3342.337120742" observedRunningTime="2025-12-03 17:26:59.561717238 +0000 UTC m=+3342.975539136" watchObservedRunningTime="2025-12-03 17:26:59.570196246 +0000 UTC m=+3342.984018134"
Dec 03 17:27:01 crc kubenswrapper[5002]: I1203 17:27:01.840614 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0"
Dec 03 17:27:01 crc kubenswrapper[5002]: E1203 17:27:01.841220 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:27:05 crc kubenswrapper[5002]: I1203 17:27:05.926369 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:27:05 crc kubenswrapper[5002]: I1203 17:27:05.928098 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:27:05 crc kubenswrapper[5002]: I1203 17:27:05.977458 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:27:06 crc kubenswrapper[5002]: I1203 17:27:06.629724 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9kvkt"
Dec 03 17:27:06 crc kubenswrapper[5002]: I1203 17:27:06.683430 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kvkt"]
Dec 03 17:27:08 crc kubenswrapper[5002]: I1203 17:27:08.601155 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9kvkt" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="registry-server" containerID="cri-o://2df4ab202af64416f6a4bb1b63ef19411a135b2e189c8a9defa243e6ae0e4d5e" gracePeriod=2
Dec 03 17:27:09 crc kubenswrapper[5002]: I1203 17:27:09.610250 5002 generic.go:334] "Generic (PLEG): container finished" podID="1aa53982-7936-4367-ad77-6a9570243faf" containerID="2df4ab202af64416f6a4bb1b63ef19411a135b2e189c8a9defa243e6ae0e4d5e" exitCode=0
Dec 03 17:27:09 crc kubenswrapper[5002]: I1203 17:27:09.610338 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kvkt" event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerDied","Data":"2df4ab202af64416f6a4bb1b63ef19411a135b2e189c8a9defa243e6ae0e4d5e"}
event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerDied","Data":"2df4ab202af64416f6a4bb1b63ef19411a135b2e189c8a9defa243e6ae0e4d5e"} Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.158544 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.278993 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-catalog-content\") pod \"1aa53982-7936-4367-ad77-6a9570243faf\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.279323 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-utilities\") pod \"1aa53982-7936-4367-ad77-6a9570243faf\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.279557 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zhjp\" (UniqueName: \"kubernetes.io/projected/1aa53982-7936-4367-ad77-6a9570243faf-kube-api-access-6zhjp\") pod \"1aa53982-7936-4367-ad77-6a9570243faf\" (UID: \"1aa53982-7936-4367-ad77-6a9570243faf\") " Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.281302 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-utilities" (OuterVolumeSpecName: "utilities") pod "1aa53982-7936-4367-ad77-6a9570243faf" (UID: "1aa53982-7936-4367-ad77-6a9570243faf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.284885 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aa53982-7936-4367-ad77-6a9570243faf-kube-api-access-6zhjp" (OuterVolumeSpecName: "kube-api-access-6zhjp") pod "1aa53982-7936-4367-ad77-6a9570243faf" (UID: "1aa53982-7936-4367-ad77-6a9570243faf"). InnerVolumeSpecName "kube-api-access-6zhjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.307323 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1aa53982-7936-4367-ad77-6a9570243faf" (UID: "1aa53982-7936-4367-ad77-6a9570243faf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.382401 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zhjp\" (UniqueName: \"kubernetes.io/projected/1aa53982-7936-4367-ad77-6a9570243faf-kube-api-access-6zhjp\") on node \"crc\" DevicePath \"\"" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.382870 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.382884 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa53982-7936-4367-ad77-6a9570243faf-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.621406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kvkt" event={"ID":"1aa53982-7936-4367-ad77-6a9570243faf","Type":"ContainerDied","Data":"dce9bfd2c9c9955902f314a03e18482aef48b47c757e013617ad72a2aac30b1d"} Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.621471 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kvkt" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.621477 5002 scope.go:117] "RemoveContainer" containerID="2df4ab202af64416f6a4bb1b63ef19411a135b2e189c8a9defa243e6ae0e4d5e" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.651095 5002 scope.go:117] "RemoveContainer" containerID="c3ea0bc4ee5d07ea56989f1b350c59bf5e8b58957b49e11522ec08d880b8e4f4" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.656211 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kvkt"] Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.663087 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kvkt"] Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.692009 5002 scope.go:117] "RemoveContainer" containerID="ac1bade073e1a09bcc6a9ab2b12ba1b2696faa466b7781efa7a6cf3d3c86b516" Dec 03 17:27:10 crc kubenswrapper[5002]: I1203 17:27:10.850189 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1aa53982-7936-4367-ad77-6a9570243faf" path="/var/lib/kubelet/pods/1aa53982-7936-4367-ad77-6a9570243faf/volumes" Dec 03 17:27:14 crc kubenswrapper[5002]: I1203 17:27:14.840700 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:27:14 crc kubenswrapper[5002]: E1203 17:27:14.841821 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:27:27 crc kubenswrapper[5002]: I1203 17:27:27.840857 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:27:27 crc kubenswrapper[5002]: E1203 17:27:27.843052 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:27:40 crc kubenswrapper[5002]: I1203 17:27:40.840215 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:27:40 crc kubenswrapper[5002]: E1203 17:27:40.841162 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:27:52 crc kubenswrapper[5002]: I1203 17:27:52.841215 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:27:52 crc kubenswrapper[5002]: E1203 17:27:52.842512 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:28:04 crc kubenswrapper[5002]: I1203 17:28:04.840636 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:28:04 crc kubenswrapper[5002]: E1203 17:28:04.841779 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:28:16 crc kubenswrapper[5002]: I1203 17:28:16.846510 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:28:16 crc kubenswrapper[5002]: E1203 17:28:16.847213 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:28:30 crc kubenswrapper[5002]: I1203 17:28:30.840608 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:28:31 crc kubenswrapper[5002]: I1203 17:28:31.258464 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"50724fa1263857bf097e3ea4acc07bf182caf229631c15cda7c9978261b83fc3"} Dec 03 17:29:03 crc kubenswrapper[5002]: 
I1203 17:29:03.462732 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pjn7b"] Dec 03 17:29:03 crc kubenswrapper[5002]: E1203 17:29:03.464135 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="registry-server" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.464158 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="registry-server" Dec 03 17:29:03 crc kubenswrapper[5002]: E1203 17:29:03.464175 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="extract-content" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.464186 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="extract-content" Dec 03 17:29:03 crc kubenswrapper[5002]: E1203 17:29:03.464213 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="extract-utilities" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.464225 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="extract-utilities" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.464454 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1aa53982-7936-4367-ad77-6a9570243faf" containerName="registry-server" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.466135 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.490384 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pjn7b"] Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.561398 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br6s4\" (UniqueName: \"kubernetes.io/projected/42eb62f3-fd57-4374-87f7-088feb7e86f3-kube-api-access-br6s4\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.561511 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-utilities\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.561589 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-catalog-content\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.663309 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br6s4\" (UniqueName: \"kubernetes.io/projected/42eb62f3-fd57-4374-87f7-088feb7e86f3-kube-api-access-br6s4\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 
03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.663399 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-utilities\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.663431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-catalog-content\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.664119 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-catalog-content\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.664286 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-utilities\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.681478 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br6s4\" (UniqueName: \"kubernetes.io/projected/42eb62f3-fd57-4374-87f7-088feb7e86f3-kube-api-access-br6s4\") pod \"certified-operators-pjn7b\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:03 crc kubenswrapper[5002]: I1203 17:29:03.806856 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:04 crc kubenswrapper[5002]: I1203 17:29:04.339724 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pjn7b"] Dec 03 17:29:04 crc kubenswrapper[5002]: I1203 17:29:04.529413 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerStarted","Data":"2f4ef6809603175efa579c695543289f44c010842ce0b83a9ba9badf926e666a"} Dec 03 17:29:04 crc kubenswrapper[5002]: I1203 17:29:04.529456 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerStarted","Data":"509cd082b97c4f362548d80f564c4cda92358551964478c6be4d9eb3b9c566c7"} Dec 03 17:29:05 crc kubenswrapper[5002]: I1203 17:29:05.538859 5002 generic.go:334] "Generic (PLEG): container finished" podID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerID="2f4ef6809603175efa579c695543289f44c010842ce0b83a9ba9badf926e666a" exitCode=0 Dec 03 17:29:05 crc kubenswrapper[5002]: I1203 17:29:05.538911 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerDied","Data":"2f4ef6809603175efa579c695543289f44c010842ce0b83a9ba9badf926e666a"} Dec 03 17:29:05 crc kubenswrapper[5002]: I1203 17:29:05.539264 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerStarted","Data":"6baf5e5fa4f46edfd8b7c65f2679b1b8075659e47daf2ae282944a8af3a98cf8"} Dec 03 17:29:06 crc kubenswrapper[5002]: I1203 17:29:06.550285 5002 generic.go:334] "Generic (PLEG): container finished" podID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerID="6baf5e5fa4f46edfd8b7c65f2679b1b8075659e47daf2ae282944a8af3a98cf8" exitCode=0 Dec 03 17:29:06 crc kubenswrapper[5002]: I1203 17:29:06.550348 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerDied","Data":"6baf5e5fa4f46edfd8b7c65f2679b1b8075659e47daf2ae282944a8af3a98cf8"} Dec 03 17:29:07 crc kubenswrapper[5002]: I1203 17:29:07.559574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerStarted","Data":"4438bb2ece0e19c01f1145b40c85a18441cac748d80d98a036eebe1e1cd847d7"} Dec 03 17:29:07 crc kubenswrapper[5002]: I1203 17:29:07.582857 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pjn7b" podStartSLOduration=2.184670957 podStartE2EDuration="4.582836942s" podCreationTimestamp="2025-12-03 17:29:03 +0000 UTC" firstStartedPulling="2025-12-03 17:29:04.531354681 +0000 UTC m=+3467.945176569" lastFinishedPulling="2025-12-03 17:29:06.929520666 +0000 UTC m=+3470.343342554" observedRunningTime="2025-12-03 17:29:07.577714064 +0000 UTC m=+3470.991535972" watchObservedRunningTime="2025-12-03 17:29:07.582836942 +0000 UTC m=+3470.996658840" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.654362 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2gjg9"] Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 
17:29:08.656208 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.663131 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gjg9"] Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.837693 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-utilities\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.837795 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppvrh\" (UniqueName: \"kubernetes.io/projected/688200bc-5307-46da-8d3f-93171940b89a-kube-api-access-ppvrh\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.837822 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-catalog-content\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.939608 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-utilities\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.940062 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-utilities\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.940092 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppvrh\" (UniqueName: \"kubernetes.io/projected/688200bc-5307-46da-8d3f-93171940b89a-kube-api-access-ppvrh\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.940130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-catalog-content\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.940391 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-catalog-content\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc 
kubenswrapper[5002]: I1203 17:29:08.965123 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppvrh\" (UniqueName: \"kubernetes.io/projected/688200bc-5307-46da-8d3f-93171940b89a-kube-api-access-ppvrh\") pod \"community-operators-2gjg9\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:08 crc kubenswrapper[5002]: I1203 17:29:08.985909 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:09 crc kubenswrapper[5002]: I1203 17:29:09.267045 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gjg9"] Dec 03 17:29:09 crc kubenswrapper[5002]: W1203 17:29:09.269208 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod688200bc_5307_46da_8d3f_93171940b89a.slice/crio-bbf7790575563175d8c60f3d54279363ef373fd8d3dba5d7b145c9a2002d607b WatchSource:0}: Error finding container bbf7790575563175d8c60f3d54279363ef373fd8d3dba5d7b145c9a2002d607b: Status 404 returned error can't find the container with id bbf7790575563175d8c60f3d54279363ef373fd8d3dba5d7b145c9a2002d607b Dec 03 17:29:09 crc kubenswrapper[5002]: I1203 17:29:09.576665 5002 generic.go:334] "Generic (PLEG): container finished" podID="688200bc-5307-46da-8d3f-93171940b89a" containerID="c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d" exitCode=0 Dec 03 17:29:09 crc kubenswrapper[5002]: I1203 17:29:09.576721 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerDied","Data":"c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d"} Dec 03 17:29:09 crc kubenswrapper[5002]: I1203 17:29:09.576774 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerStarted","Data":"bbf7790575563175d8c60f3d54279363ef373fd8d3dba5d7b145c9a2002d607b"} Dec 03 17:29:10 crc kubenswrapper[5002]: I1203 17:29:10.597779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerStarted","Data":"c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b"} Dec 03 17:29:11 crc kubenswrapper[5002]: I1203 17:29:11.607995 5002 generic.go:334] "Generic (PLEG): container finished" podID="688200bc-5307-46da-8d3f-93171940b89a" containerID="c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b" exitCode=0 Dec 03 17:29:11 crc kubenswrapper[5002]: I1203 17:29:11.608067 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerDied","Data":"c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b"} Dec 03 17:29:12 crc kubenswrapper[5002]: I1203 17:29:12.616396 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerStarted","Data":"5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a"} Dec 03 17:29:12 crc kubenswrapper[5002]: I1203 17:29:12.634333 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-2gjg9" podStartSLOduration=2.236449681 podStartE2EDuration="4.634315489s" podCreationTimestamp="2025-12-03 17:29:08 +0000 UTC" firstStartedPulling="2025-12-03 17:29:09.57809056 +0000 UTC m=+3472.991912448" lastFinishedPulling="2025-12-03 17:29:11.975956368 +0000 UTC m=+3475.389778256" observedRunningTime="2025-12-03 17:29:12.632221942 +0000 UTC m=+3476.046043830" watchObservedRunningTime="2025-12-03 17:29:12.634315489 +0000 UTC m=+3476.048137377" Dec 03 17:29:13 crc kubenswrapper[5002]: I1203 17:29:13.807154 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:13 crc kubenswrapper[5002]: I1203 17:29:13.807205 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:13 crc kubenswrapper[5002]: I1203 17:29:13.887974 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:14 crc kubenswrapper[5002]: I1203 17:29:14.691707 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:15 crc kubenswrapper[5002]: I1203 17:29:15.029279 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pjn7b"] Dec 03 17:29:16 crc kubenswrapper[5002]: I1203 17:29:16.651105 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pjn7b" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="registry-server" containerID="cri-o://4438bb2ece0e19c01f1145b40c85a18441cac748d80d98a036eebe1e1cd847d7" gracePeriod=2 Dec 03 17:29:17 crc kubenswrapper[5002]: I1203 17:29:17.663187 5002 generic.go:334] "Generic (PLEG): container finished" podID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerID="4438bb2ece0e19c01f1145b40c85a18441cac748d80d98a036eebe1e1cd847d7" exitCode=0 Dec 03 17:29:17 crc kubenswrapper[5002]: I1203 17:29:17.663242 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerDied","Data":"4438bb2ece0e19c01f1145b40c85a18441cac748d80d98a036eebe1e1cd847d7"} Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.164473 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.271104 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br6s4\" (UniqueName: \"kubernetes.io/projected/42eb62f3-fd57-4374-87f7-088feb7e86f3-kube-api-access-br6s4\") pod \"42eb62f3-fd57-4374-87f7-088feb7e86f3\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.271242 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-catalog-content\") pod \"42eb62f3-fd57-4374-87f7-088feb7e86f3\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.271944 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-utilities\") pod \"42eb62f3-fd57-4374-87f7-088feb7e86f3\" (UID: \"42eb62f3-fd57-4374-87f7-088feb7e86f3\") " Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.273618 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-utilities" (OuterVolumeSpecName: "utilities") pod "42eb62f3-fd57-4374-87f7-088feb7e86f3" (UID: "42eb62f3-fd57-4374-87f7-088feb7e86f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.284984 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42eb62f3-fd57-4374-87f7-088feb7e86f3-kube-api-access-br6s4" (OuterVolumeSpecName: "kube-api-access-br6s4") pod "42eb62f3-fd57-4374-87f7-088feb7e86f3" (UID: "42eb62f3-fd57-4374-87f7-088feb7e86f3"). InnerVolumeSpecName "kube-api-access-br6s4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.327477 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42eb62f3-fd57-4374-87f7-088feb7e86f3" (UID: "42eb62f3-fd57-4374-87f7-088feb7e86f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.374146 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br6s4\" (UniqueName: \"kubernetes.io/projected/42eb62f3-fd57-4374-87f7-088feb7e86f3-kube-api-access-br6s4\") on node \"crc\" DevicePath \"\"" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.374179 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.374189 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42eb62f3-fd57-4374-87f7-088feb7e86f3-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.673461 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pjn7b" event={"ID":"42eb62f3-fd57-4374-87f7-088feb7e86f3","Type":"ContainerDied","Data":"509cd082b97c4f362548d80f564c4cda92358551964478c6be4d9eb3b9c566c7"} Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.673543 5002 scope.go:117] "RemoveContainer" containerID="4438bb2ece0e19c01f1145b40c85a18441cac748d80d98a036eebe1e1cd847d7" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.674354 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pjn7b" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.696190 5002 scope.go:117] "RemoveContainer" containerID="6baf5e5fa4f46edfd8b7c65f2679b1b8075659e47daf2ae282944a8af3a98cf8" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.716129 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pjn7b"] Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.722424 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pjn7b"] Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.747141 5002 scope.go:117] "RemoveContainer" containerID="2f4ef6809603175efa579c695543289f44c010842ce0b83a9ba9badf926e666a" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.854478 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" path="/var/lib/kubelet/pods/42eb62f3-fd57-4374-87f7-088feb7e86f3/volumes" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.987182 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:18 crc kubenswrapper[5002]: I1203 17:29:18.987405 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:19 crc kubenswrapper[5002]: I1203 17:29:19.040401 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:19 crc kubenswrapper[5002]: I1203 17:29:19.728076 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:21 crc kubenswrapper[5002]: I1203 17:29:21.435240 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gjg9"] Dec 03 17:29:22 crc kubenswrapper[5002]: I1203 17:29:22.706602 5002 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/community-operators-2gjg9" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="registry-server" containerID="cri-o://5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a" gracePeriod=2 Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.112636 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.254106 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-catalog-content\") pod \"688200bc-5307-46da-8d3f-93171940b89a\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.254333 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppvrh\" (UniqueName: \"kubernetes.io/projected/688200bc-5307-46da-8d3f-93171940b89a-kube-api-access-ppvrh\") pod \"688200bc-5307-46da-8d3f-93171940b89a\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.254364 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-utilities\") pod \"688200bc-5307-46da-8d3f-93171940b89a\" (UID: \"688200bc-5307-46da-8d3f-93171940b89a\") " Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.255286 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-utilities" (OuterVolumeSpecName: "utilities") pod "688200bc-5307-46da-8d3f-93171940b89a" (UID: "688200bc-5307-46da-8d3f-93171940b89a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.261928 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/688200bc-5307-46da-8d3f-93171940b89a-kube-api-access-ppvrh" (OuterVolumeSpecName: "kube-api-access-ppvrh") pod "688200bc-5307-46da-8d3f-93171940b89a" (UID: "688200bc-5307-46da-8d3f-93171940b89a"). InnerVolumeSpecName "kube-api-access-ppvrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.304349 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "688200bc-5307-46da-8d3f-93171940b89a" (UID: "688200bc-5307-46da-8d3f-93171940b89a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.355849 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.355887 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppvrh\" (UniqueName: \"kubernetes.io/projected/688200bc-5307-46da-8d3f-93171940b89a-kube-api-access-ppvrh\") on node \"crc\" DevicePath \"\"" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.355903 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/688200bc-5307-46da-8d3f-93171940b89a-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.716583 5002 generic.go:334] "Generic (PLEG): container finished" podID="688200bc-5307-46da-8d3f-93171940b89a" containerID="5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a" exitCode=0 Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.716635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerDied","Data":"5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a"} Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.716670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gjg9" event={"ID":"688200bc-5307-46da-8d3f-93171940b89a","Type":"ContainerDied","Data":"bbf7790575563175d8c60f3d54279363ef373fd8d3dba5d7b145c9a2002d607b"} Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.716696 5002 scope.go:117] "RemoveContainer" containerID="5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.716880 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2gjg9" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.759846 5002 scope.go:117] "RemoveContainer" containerID="c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.761905 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gjg9"] Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.769113 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2gjg9"] Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.782270 5002 scope.go:117] "RemoveContainer" containerID="c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.813181 5002 scope.go:117] "RemoveContainer" containerID="5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a" Dec 03 17:29:23 crc kubenswrapper[5002]: E1203 17:29:23.813780 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a\": container with ID starting with 5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a not found: ID does not exist" containerID="5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.813851 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a"} err="failed to get container status \"5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a\": rpc error: code = NotFound desc = could not find container \"5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a\": container with ID starting with 5c2a02119b3c4874200b4ae46714033dd47141eedd6ebe382fc540ad93d99e0a not found: ID does not exist" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.813892 5002 scope.go:117] "RemoveContainer" containerID="c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b" Dec 03 17:29:23 crc kubenswrapper[5002]: E1203 17:29:23.814443 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b\": container with ID starting with c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b not found: ID does not exist" containerID="c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.814500 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b"} err="failed to get container status \"c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b\": rpc error: code = NotFound desc = could not find container \"c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b\": container with ID starting with c09db92a5bed11393ac503444ca390a3c6836d33cbff75503d5d120391e01d1b not found: ID does not exist" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.814535 5002 scope.go:117] "RemoveContainer" containerID="c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d" Dec 03 17:29:23 crc kubenswrapper[5002]: E1203 17:29:23.814953 5002 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d\": container with ID starting with c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d not found: ID does not exist" containerID="c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d" Dec 03 17:29:23 crc kubenswrapper[5002]: I1203 17:29:23.814991 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d"} err="failed to get container status \"c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d\": rpc error: code = NotFound desc = could not find container \"c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d\": container with ID starting with c7e384ab5c5eb48b7a58aa9b1f9ac9ad8e0fd8fd4072fad68e03b677ea55e44d not found: ID does not exist" Dec 03 17:29:24 crc kubenswrapper[5002]: I1203 17:29:24.852570 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="688200bc-5307-46da-8d3f-93171940b89a" path="/var/lib/kubelet/pods/688200bc-5307-46da-8d3f-93171940b89a/volumes" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.165976 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6"] Dec 03 17:30:00 crc kubenswrapper[5002]: E1203 17:30:00.168978 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.169133 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[5002]: E1203 17:30:00.169254 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.169345 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[5002]: E1203 17:30:00.169434 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.169509 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[5002]: E1203 17:30:00.169595 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.169670 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[5002]: E1203 17:30:00.169817 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.169937 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[5002]: E1203 17:30:00.170030 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" 
containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.170115 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.170423 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="688200bc-5307-46da-8d3f-93171940b89a" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.170622 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="42eb62f3-fd57-4374-87f7-088feb7e86f3" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.171369 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.183703 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.185713 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6"] Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.184020 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.255397 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2pqr\" (UniqueName: \"kubernetes.io/projected/c5fa8512-9046-4b25-960f-5b9c4774c48b-kube-api-access-c2pqr\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.255490 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5fa8512-9046-4b25-960f-5b9c4774c48b-secret-volume\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.255553 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5fa8512-9046-4b25-960f-5b9c4774c48b-config-volume\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.357586 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2pqr\" (UniqueName: \"kubernetes.io/projected/c5fa8512-9046-4b25-960f-5b9c4774c48b-kube-api-access-c2pqr\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.357725 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5fa8512-9046-4b25-960f-5b9c4774c48b-secret-volume\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.357865 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5fa8512-9046-4b25-960f-5b9c4774c48b-config-volume\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.359702 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5fa8512-9046-4b25-960f-5b9c4774c48b-config-volume\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.368632 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5fa8512-9046-4b25-960f-5b9c4774c48b-secret-volume\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.381899 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2pqr\" (UniqueName: \"kubernetes.io/projected/c5fa8512-9046-4b25-960f-5b9c4774c48b-kube-api-access-c2pqr\") pod \"collect-profiles-29413050-pz7m6\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.491062 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:00 crc kubenswrapper[5002]: I1203 17:30:00.906177 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6"] Dec 03 17:30:01 crc kubenswrapper[5002]: I1203 17:30:01.050939 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" event={"ID":"c5fa8512-9046-4b25-960f-5b9c4774c48b","Type":"ContainerStarted","Data":"4f4d3b4ba55872f70a36c4e2bda6e0ecf2857f0616205c336de7ee549af48e0b"} Dec 03 17:30:02 crc kubenswrapper[5002]: I1203 17:30:02.060510 5002 generic.go:334] "Generic (PLEG): container finished" podID="c5fa8512-9046-4b25-960f-5b9c4774c48b" containerID="ea28dc6b7dd75477d2ce83d978b4b1152984b363d7f393d7522c487bc0b499b1" exitCode=0 Dec 03 17:30:02 crc kubenswrapper[5002]: I1203 17:30:02.060587 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" event={"ID":"c5fa8512-9046-4b25-960f-5b9c4774c48b","Type":"ContainerDied","Data":"ea28dc6b7dd75477d2ce83d978b4b1152984b363d7f393d7522c487bc0b499b1"} Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.382172 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.505069 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2pqr\" (UniqueName: \"kubernetes.io/projected/c5fa8512-9046-4b25-960f-5b9c4774c48b-kube-api-access-c2pqr\") pod \"c5fa8512-9046-4b25-960f-5b9c4774c48b\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.505133 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5fa8512-9046-4b25-960f-5b9c4774c48b-config-volume\") pod \"c5fa8512-9046-4b25-960f-5b9c4774c48b\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.505174 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5fa8512-9046-4b25-960f-5b9c4774c48b-secret-volume\") pod \"c5fa8512-9046-4b25-960f-5b9c4774c48b\" (UID: \"c5fa8512-9046-4b25-960f-5b9c4774c48b\") " Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.507045 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5fa8512-9046-4b25-960f-5b9c4774c48b-config-volume" (OuterVolumeSpecName: "config-volume") pod "c5fa8512-9046-4b25-960f-5b9c4774c48b" (UID: "c5fa8512-9046-4b25-960f-5b9c4774c48b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.514658 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5fa8512-9046-4b25-960f-5b9c4774c48b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c5fa8512-9046-4b25-960f-5b9c4774c48b" (UID: "c5fa8512-9046-4b25-960f-5b9c4774c48b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.516399 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5fa8512-9046-4b25-960f-5b9c4774c48b-kube-api-access-c2pqr" (OuterVolumeSpecName: "kube-api-access-c2pqr") pod "c5fa8512-9046-4b25-960f-5b9c4774c48b" (UID: "c5fa8512-9046-4b25-960f-5b9c4774c48b"). InnerVolumeSpecName "kube-api-access-c2pqr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.606771 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2pqr\" (UniqueName: \"kubernetes.io/projected/c5fa8512-9046-4b25-960f-5b9c4774c48b-kube-api-access-c2pqr\") on node \"crc\" DevicePath \"\"" Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.606831 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5fa8512-9046-4b25-960f-5b9c4774c48b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:30:03 crc kubenswrapper[5002]: I1203 17:30:03.606843 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5fa8512-9046-4b25-960f-5b9c4774c48b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:30:04 crc kubenswrapper[5002]: I1203 17:30:04.078047 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" event={"ID":"c5fa8512-9046-4b25-960f-5b9c4774c48b","Type":"ContainerDied","Data":"4f4d3b4ba55872f70a36c4e2bda6e0ecf2857f0616205c336de7ee549af48e0b"} Dec 03 17:30:04 crc kubenswrapper[5002]: I1203 17:30:04.078797 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f4d3b4ba55872f70a36c4e2bda6e0ecf2857f0616205c336de7ee549af48e0b" Dec 03 17:30:04 crc kubenswrapper[5002]: I1203 17:30:04.078112 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6" Dec 03 17:30:04 crc kubenswrapper[5002]: I1203 17:30:04.454634 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs"] Dec 03 17:30:04 crc kubenswrapper[5002]: I1203 17:30:04.460186 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-jmfzs"] Dec 03 17:30:04 crc kubenswrapper[5002]: I1203 17:30:04.849494 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="002bdda7-9280-4517-a570-2f2d4f1d1dab" path="/var/lib/kubelet/pods/002bdda7-9280-4517-a570-2f2d4f1d1dab/volumes" Dec 03 17:30:40 crc kubenswrapper[5002]: I1203 17:30:40.331433 5002 scope.go:117] "RemoveContainer" containerID="8dd70c8987394a9ec519d147d9609ce75933d4316bd207ebe443d4b203a7a48e" Dec 03 17:30:50 crc kubenswrapper[5002]: I1203 17:30:50.918055 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:30:50 crc kubenswrapper[5002]: I1203 17:30:50.918948 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:31:20 crc kubenswrapper[5002]: I1203 17:31:20.917271 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 03 17:31:20 crc kubenswrapper[5002]: I1203 17:31:20.918146 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:31:50 crc kubenswrapper[5002]: I1203 17:31:50.918032 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:31:50 crc kubenswrapper[5002]: I1203 17:31:50.918690 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:31:50 crc kubenswrapper[5002]: I1203 17:31:50.918765 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 17:31:50 crc kubenswrapper[5002]: I1203 17:31:50.919657 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50724fa1263857bf097e3ea4acc07bf182caf229631c15cda7c9978261b83fc3"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:31:50 crc kubenswrapper[5002]: I1203 17:31:50.919733 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://50724fa1263857bf097e3ea4acc07bf182caf229631c15cda7c9978261b83fc3" gracePeriod=600 Dec 03 17:31:51 crc kubenswrapper[5002]: I1203 17:31:51.054080 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="50724fa1263857bf097e3ea4acc07bf182caf229631c15cda7c9978261b83fc3" exitCode=0 Dec 03 17:31:51 crc kubenswrapper[5002]: I1203 17:31:51.054120 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"50724fa1263857bf097e3ea4acc07bf182caf229631c15cda7c9978261b83fc3"} Dec 03 17:31:51 crc kubenswrapper[5002]: I1203 17:31:51.054151 5002 scope.go:117] "RemoveContainer" containerID="5ee9440f8e3172b7291dd9fbf7853180c8e53a2c94ad905d58d889df96570db0" Dec 03 17:31:52 crc kubenswrapper[5002]: I1203 17:31:52.073261 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"} Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.663115 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4z5tr"] Dec 03 17:31:53 crc kubenswrapper[5002]: E1203 17:31:53.664036 5002 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="c5fa8512-9046-4b25-960f-5b9c4774c48b" containerName="collect-profiles" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.664068 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fa8512-9046-4b25-960f-5b9c4774c48b" containerName="collect-profiles" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.664420 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fa8512-9046-4b25-960f-5b9c4774c48b" containerName="collect-profiles" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.666213 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.684303 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4z5tr"] Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.818523 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-utilities\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.818769 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-catalog-content\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.818889 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrpg9\" (UniqueName: \"kubernetes.io/projected/04b36c9c-dff1-4996-9474-3c7b375dd540-kube-api-access-zrpg9\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.920469 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrpg9\" (UniqueName: \"kubernetes.io/projected/04b36c9c-dff1-4996-9474-3c7b375dd540-kube-api-access-zrpg9\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.920573 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-utilities\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.920627 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-catalog-content\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.921187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-catalog-content\") pod 
\"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.921222 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-utilities\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:53 crc kubenswrapper[5002]: I1203 17:31:53.941213 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrpg9\" (UniqueName: \"kubernetes.io/projected/04b36c9c-dff1-4996-9474-3c7b375dd540-kube-api-access-zrpg9\") pod \"redhat-operators-4z5tr\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:54 crc kubenswrapper[5002]: I1203 17:31:54.035457 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:31:54 crc kubenswrapper[5002]: I1203 17:31:54.249734 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4z5tr"] Dec 03 17:31:54 crc kubenswrapper[5002]: W1203 17:31:54.258180 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04b36c9c_dff1_4996_9474_3c7b375dd540.slice/crio-d519fc3639ac18ce60ec3e25b38e6aac72e043a033a7229b8d381fc5ac0fed1c WatchSource:0}: Error finding container d519fc3639ac18ce60ec3e25b38e6aac72e043a033a7229b8d381fc5ac0fed1c: Status 404 returned error can't find the container with id d519fc3639ac18ce60ec3e25b38e6aac72e043a033a7229b8d381fc5ac0fed1c Dec 03 17:31:55 crc kubenswrapper[5002]: I1203 17:31:55.101522 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerStarted","Data":"e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3"} Dec 03 17:31:55 crc kubenswrapper[5002]: I1203 17:31:55.102544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerStarted","Data":"d519fc3639ac18ce60ec3e25b38e6aac72e043a033a7229b8d381fc5ac0fed1c"} Dec 03 17:31:56 crc kubenswrapper[5002]: I1203 17:31:56.110942 5002 generic.go:334] "Generic (PLEG): container finished" podID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerID="e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3" exitCode=0 Dec 03 17:31:56 crc kubenswrapper[5002]: I1203 17:31:56.111072 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerDied","Data":"e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3"} Dec 03 17:31:58 crc kubenswrapper[5002]: I1203 17:31:58.137352 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerStarted","Data":"b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049"} Dec 03 17:31:59 crc kubenswrapper[5002]: I1203 17:31:59.148548 5002 generic.go:334] "Generic (PLEG): container finished" podID="04b36c9c-dff1-4996-9474-3c7b375dd540" 
containerID="b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049" exitCode=0 Dec 03 17:31:59 crc kubenswrapper[5002]: I1203 17:31:59.148833 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerDied","Data":"b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049"} Dec 03 17:31:59 crc kubenswrapper[5002]: I1203 17:31:59.151457 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:32:00 crc kubenswrapper[5002]: I1203 17:32:00.158571 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerStarted","Data":"f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379"} Dec 03 17:32:00 crc kubenswrapper[5002]: I1203 17:32:00.179834 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4z5tr" podStartSLOduration=3.71279709 podStartE2EDuration="7.179808929s" podCreationTimestamp="2025-12-03 17:31:53 +0000 UTC" firstStartedPulling="2025-12-03 17:31:56.113650395 +0000 UTC m=+3639.527472293" lastFinishedPulling="2025-12-03 17:31:59.580662214 +0000 UTC m=+3642.994484132" observedRunningTime="2025-12-03 17:32:00.173068658 +0000 UTC m=+3643.586890546" watchObservedRunningTime="2025-12-03 17:32:00.179808929 +0000 UTC m=+3643.593630857" Dec 03 17:32:04 crc kubenswrapper[5002]: I1203 17:32:04.035807 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:32:04 crc kubenswrapper[5002]: I1203 17:32:04.037008 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:32:05 crc kubenswrapper[5002]: I1203 17:32:05.092809 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4z5tr" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="registry-server" probeResult="failure" output=< Dec 03 17:32:05 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Dec 03 17:32:05 crc kubenswrapper[5002]: > Dec 03 17:32:14 crc kubenswrapper[5002]: I1203 17:32:14.110342 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:32:14 crc kubenswrapper[5002]: I1203 17:32:14.182839 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:32:14 crc kubenswrapper[5002]: I1203 17:32:14.365450 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4z5tr"] Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.287168 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4z5tr" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="registry-server" containerID="cri-o://f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379" gracePeriod=2 Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.763451 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.844961 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-utilities\") pod \"04b36c9c-dff1-4996-9474-3c7b375dd540\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.845865 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-utilities" (OuterVolumeSpecName: "utilities") pod "04b36c9c-dff1-4996-9474-3c7b375dd540" (UID: "04b36c9c-dff1-4996-9474-3c7b375dd540"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.846026 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrpg9\" (UniqueName: \"kubernetes.io/projected/04b36c9c-dff1-4996-9474-3c7b375dd540-kube-api-access-zrpg9\") pod \"04b36c9c-dff1-4996-9474-3c7b375dd540\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.846996 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-catalog-content\") pod \"04b36c9c-dff1-4996-9474-3c7b375dd540\" (UID: \"04b36c9c-dff1-4996-9474-3c7b375dd540\") " Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.865953 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04b36c9c-dff1-4996-9474-3c7b375dd540-kube-api-access-zrpg9" (OuterVolumeSpecName: "kube-api-access-zrpg9") pod "04b36c9c-dff1-4996-9474-3c7b375dd540" (UID: "04b36c9c-dff1-4996-9474-3c7b375dd540"). InnerVolumeSpecName "kube-api-access-zrpg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.867806 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrpg9\" (UniqueName: \"kubernetes.io/projected/04b36c9c-dff1-4996-9474-3c7b375dd540-kube-api-access-zrpg9\") on node \"crc\" DevicePath \"\"" Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.867876 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:32:15 crc kubenswrapper[5002]: I1203 17:32:15.973699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04b36c9c-dff1-4996-9474-3c7b375dd540" (UID: "04b36c9c-dff1-4996-9474-3c7b375dd540"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.072602 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04b36c9c-dff1-4996-9474-3c7b375dd540-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.301934 5002 generic.go:334] "Generic (PLEG): container finished" podID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerID="f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379" exitCode=0 Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.302017 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerDied","Data":"f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379"} Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.302052 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4z5tr" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.302070 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4z5tr" event={"ID":"04b36c9c-dff1-4996-9474-3c7b375dd540","Type":"ContainerDied","Data":"d519fc3639ac18ce60ec3e25b38e6aac72e043a033a7229b8d381fc5ac0fed1c"} Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.302117 5002 scope.go:117] "RemoveContainer" containerID="f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.334558 5002 scope.go:117] "RemoveContainer" containerID="b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.370733 5002 scope.go:117] "RemoveContainer" containerID="e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.370839 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4z5tr"] Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.378463 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4z5tr"] Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.397557 5002 scope.go:117] "RemoveContainer" containerID="f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379" Dec 03 17:32:16 crc kubenswrapper[5002]: E1203 17:32:16.399068 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379\": container with ID starting with f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379 not found: ID does not exist" containerID="f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.399148 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379"} err="failed to get container status \"f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379\": rpc error: code = NotFound desc = could not find container \"f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379\": container with ID starting with f852e1cc840cae251959aec24b6c5cb499c3d22ba4213091def1f12fe49d8379 not found: ID does not exist" Dec 03 17:32:16 crc 
kubenswrapper[5002]: I1203 17:32:16.399217 5002 scope.go:117] "RemoveContainer" containerID="b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049" Dec 03 17:32:16 crc kubenswrapper[5002]: E1203 17:32:16.399816 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049\": container with ID starting with b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049 not found: ID does not exist" containerID="b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.399869 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049"} err="failed to get container status \"b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049\": rpc error: code = NotFound desc = could not find container \"b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049\": container with ID starting with b8651eca80396044b46db1844e98373b9ac0a4b432c3ac82df58a81697827049 not found: ID does not exist" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.399901 5002 scope.go:117] "RemoveContainer" containerID="e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3" Dec 03 17:32:16 crc kubenswrapper[5002]: E1203 17:32:16.400399 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3\": container with ID starting with e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3 not found: ID does not exist" containerID="e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.400463 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3"} err="failed to get container status \"e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3\": rpc error: code = NotFound desc = could not find container \"e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3\": container with ID starting with e819435f2f959ce168dc1a797e4b7a8287d42b2534ffb3e0f011c16419b4d2c3 not found: ID does not exist" Dec 03 17:32:16 crc kubenswrapper[5002]: I1203 17:32:16.856897 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" path="/var/lib/kubelet/pods/04b36c9c-dff1-4996-9474-3c7b375dd540/volumes" Dec 03 17:34:20 crc kubenswrapper[5002]: I1203 17:34:20.917783 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:34:20 crc kubenswrapper[5002]: I1203 17:34:20.918724 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:34:50 crc kubenswrapper[5002]: I1203 17:34:50.917191 5002 patch_prober.go:28] interesting 
pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:34:50 crc kubenswrapper[5002]: I1203 17:34:50.918160 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:35:20 crc kubenswrapper[5002]: I1203 17:35:20.916320 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:35:20 crc kubenswrapper[5002]: I1203 17:35:20.917004 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:35:20 crc kubenswrapper[5002]: I1203 17:35:20.917080 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 17:35:20 crc kubenswrapper[5002]: I1203 17:35:20.917831 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:35:20 crc kubenswrapper[5002]: I1203 17:35:20.917926 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" gracePeriod=600 Dec 03 17:35:21 crc kubenswrapper[5002]: E1203 17:35:21.048591 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:35:21 crc kubenswrapper[5002]: I1203 17:35:21.224815 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" exitCode=0 Dec 03 17:35:21 crc kubenswrapper[5002]: I1203 17:35:21.224885 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"} Dec 03 17:35:21 crc kubenswrapper[5002]: 
I1203 17:35:21.224983 5002 scope.go:117] "RemoveContainer" containerID="50724fa1263857bf097e3ea4acc07bf182caf229631c15cda7c9978261b83fc3" Dec 03 17:35:21 crc kubenswrapper[5002]: I1203 17:35:21.225662 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:35:21 crc kubenswrapper[5002]: E1203 17:35:21.226518 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:35:31 crc kubenswrapper[5002]: I1203 17:35:31.840501 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:35:31 crc kubenswrapper[5002]: E1203 17:35:31.841902 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:35:42 crc kubenswrapper[5002]: I1203 17:35:42.841072 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:35:42 crc kubenswrapper[5002]: E1203 17:35:42.842065 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:35:55 crc kubenswrapper[5002]: I1203 17:35:55.841045 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:35:55 crc kubenswrapper[5002]: E1203 17:35:55.842249 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:36:07 crc kubenswrapper[5002]: I1203 17:36:07.841296 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:36:07 crc kubenswrapper[5002]: E1203 17:36:07.843118 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:36:22 crc kubenswrapper[5002]: I1203 
Dec 03 17:36:22 crc kubenswrapper[5002]: E1203 17:36:22.841185 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:36:36 crc kubenswrapper[5002]: I1203 17:36:36.849949 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:36:36 crc kubenswrapper[5002]: E1203 17:36:36.851005 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:36:50 crc kubenswrapper[5002]: I1203 17:36:50.840982 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:36:50 crc kubenswrapper[5002]: E1203 17:36:50.842072 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:37:05 crc kubenswrapper[5002]: I1203 17:37:05.840890 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:37:05 crc kubenswrapper[5002]: E1203 17:37:05.841904 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:37:17 crc kubenswrapper[5002]: I1203 17:37:17.840552 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:37:17 crc kubenswrapper[5002]: E1203 17:37:17.841538 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.111818 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nsnc2"]
Dec 03 17:37:22 crc kubenswrapper[5002]: E1203 17:37:22.113678 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="extract-utilities"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.113788 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="extract-utilities"
Dec 03 17:37:22 crc kubenswrapper[5002]: E1203 17:37:22.113865 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="extract-content"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.113932 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="extract-content"
Dec 03 17:37:22 crc kubenswrapper[5002]: E1203 17:37:22.114019 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="registry-server"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.114078 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="registry-server"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.114327 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="04b36c9c-dff1-4996-9474-3c7b375dd540" containerName="registry-server"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.115622 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.130693 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsnc2"]
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.144064 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c446v\" (UniqueName: \"kubernetes.io/projected/9b70daf6-34a1-4de4-98ad-ec2708b1c139-kube-api-access-c446v\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.144129 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-utilities\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.144165 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-catalog-content\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.245079 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c446v\" (UniqueName: \"kubernetes.io/projected/9b70daf6-34a1-4de4-98ad-ec2708b1c139-kube-api-access-c446v\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.245480 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-utilities\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.245956 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-utilities\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.246025 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-catalog-content\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.246336 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-catalog-content\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.266310 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c446v\" (UniqueName: \"kubernetes.io/projected/9b70daf6-34a1-4de4-98ad-ec2708b1c139-kube-api-access-c446v\") pod \"redhat-marketplace-nsnc2\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") " pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.443727 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:22 crc kubenswrapper[5002]: I1203 17:37:22.891904 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsnc2"]
Dec 03 17:37:23 crc kubenswrapper[5002]: I1203 17:37:23.341852 5002 generic.go:334] "Generic (PLEG): container finished" podID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerID="eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0" exitCode=0
Dec 03 17:37:23 crc kubenswrapper[5002]: I1203 17:37:23.341976 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsnc2" event={"ID":"9b70daf6-34a1-4de4-98ad-ec2708b1c139","Type":"ContainerDied","Data":"eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0"}
Dec 03 17:37:23 crc kubenswrapper[5002]: I1203 17:37:23.342271 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsnc2" event={"ID":"9b70daf6-34a1-4de4-98ad-ec2708b1c139","Type":"ContainerStarted","Data":"7f34fdaff6b086259b136a375d374223fa1000c858b2734f80b8bccdbbae6fbb"}
Dec 03 17:37:23 crc kubenswrapper[5002]: I1203 17:37:23.344721 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 17:37:24 crc kubenswrapper[5002]: I1203 17:37:24.350324 5002 generic.go:334] "Generic (PLEG): container finished" podID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerID="611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631" exitCode=0
Dec 03 17:37:24 crc kubenswrapper[5002]: I1203 17:37:24.350367 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsnc2" event={"ID":"9b70daf6-34a1-4de4-98ad-ec2708b1c139","Type":"ContainerDied","Data":"611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631"}
Dec 03 17:37:25 crc kubenswrapper[5002]: I1203 17:37:25.360619 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsnc2" event={"ID":"9b70daf6-34a1-4de4-98ad-ec2708b1c139","Type":"ContainerStarted","Data":"3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a"}
Dec 03 17:37:25 crc kubenswrapper[5002]: I1203 17:37:25.381540 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nsnc2" podStartSLOduration=1.9846758869999999 podStartE2EDuration="3.381520924s" podCreationTimestamp="2025-12-03 17:37:22 +0000 UTC" firstStartedPulling="2025-12-03 17:37:23.344485843 +0000 UTC m=+3966.758307731" lastFinishedPulling="2025-12-03 17:37:24.74133088 +0000 UTC m=+3968.155152768" observedRunningTime="2025-12-03 17:37:25.375556224 +0000 UTC m=+3968.789378112" watchObservedRunningTime="2025-12-03 17:37:25.381520924 +0000 UTC m=+3968.795342812"
Dec 03 17:37:28 crc kubenswrapper[5002]: I1203 17:37:28.840363 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:37:28 crc kubenswrapper[5002]: E1203 17:37:28.841114 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:37:32 crc kubenswrapper[5002]: I1203 17:37:32.444486 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:32 crc kubenswrapper[5002]: I1203 17:37:32.445118 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:32 crc kubenswrapper[5002]: I1203 17:37:32.510103 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:33 crc kubenswrapper[5002]: I1203 17:37:33.494095 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:33 crc kubenswrapper[5002]: I1203 17:37:33.548374 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsnc2"]
Dec 03 17:37:35 crc kubenswrapper[5002]: I1203 17:37:35.444070 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nsnc2" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="registry-server" containerID="cri-o://3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a" gracePeriod=2
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.450415 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.453786 5002 generic.go:334] "Generic (PLEG): container finished" podID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerID="3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a" exitCode=0
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.453833 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsnc2" event={"ID":"9b70daf6-34a1-4de4-98ad-ec2708b1c139","Type":"ContainerDied","Data":"3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a"}
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.453870 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsnc2" event={"ID":"9b70daf6-34a1-4de4-98ad-ec2708b1c139","Type":"ContainerDied","Data":"7f34fdaff6b086259b136a375d374223fa1000c858b2734f80b8bccdbbae6fbb"}
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.453883 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsnc2"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.453890 5002 scope.go:117] "RemoveContainer" containerID="3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.485025 5002 scope.go:117] "RemoveContainer" containerID="611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.506690 5002 scope.go:117] "RemoveContainer" containerID="eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.528930 5002 scope.go:117] "RemoveContainer" containerID="3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a"
Dec 03 17:37:36 crc kubenswrapper[5002]: E1203 17:37:36.529660 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a\": container with ID starting with 3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a not found: ID does not exist" containerID="3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.529698 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a"} err="failed to get container status \"3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a\": rpc error: code = NotFound desc = could not find container \"3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a\": container with ID starting with 3e54a83df0dd044444c1f9b745b371eae546ac7d318bd4a3a5a31e452b21c28a not found: ID does not exist"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.529723 5002 scope.go:117] "RemoveContainer" containerID="611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631"
Dec 03 17:37:36 crc kubenswrapper[5002]: E1203 17:37:36.530132 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631\": container with ID starting with 611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631 not found: ID does not exist" containerID="611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.530165 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631"} err="failed to get container status \"611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631\": rpc error: code = NotFound desc = could not find container \"611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631\": container with ID starting with 611d71bb2caeccd7e66f9c5044089308c6ea760984637fe29b24f3807bec6631 not found: ID does not exist"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.530237 5002 scope.go:117] "RemoveContainer" containerID="eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0"
Dec 03 17:37:36 crc kubenswrapper[5002]: E1203 17:37:36.530692 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0\": container with ID starting with eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0 not found: ID does not exist" containerID="eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.530727 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0"} err="failed to get container status \"eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0\": rpc error: code = NotFound desc = could not find container \"eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0\": container with ID starting with eecf266e81cc8e76dcfefd949e20ab4c47f8212b825d0e3881377873fab1a5f0 not found: ID does not exist"
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.602520 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-utilities\") pod \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") "
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.602588 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c446v\" (UniqueName: \"kubernetes.io/projected/9b70daf6-34a1-4de4-98ad-ec2708b1c139-kube-api-access-c446v\") pod \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") "
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.602682 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-catalog-content\") pod \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\" (UID: \"9b70daf6-34a1-4de4-98ad-ec2708b1c139\") "
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.605699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-utilities" (OuterVolumeSpecName: "utilities") pod "9b70daf6-34a1-4de4-98ad-ec2708b1c139" (UID: "9b70daf6-34a1-4de4-98ad-ec2708b1c139"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.610257 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b70daf6-34a1-4de4-98ad-ec2708b1c139-kube-api-access-c446v" (OuterVolumeSpecName: "kube-api-access-c446v") pod "9b70daf6-34a1-4de4-98ad-ec2708b1c139" (UID: "9b70daf6-34a1-4de4-98ad-ec2708b1c139"). InnerVolumeSpecName "kube-api-access-c446v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.621625 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b70daf6-34a1-4de4-98ad-ec2708b1c139" (UID: "9b70daf6-34a1-4de4-98ad-ec2708b1c139"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.704180 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.704228 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b70daf6-34a1-4de4-98ad-ec2708b1c139-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.704242 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c446v\" (UniqueName: \"kubernetes.io/projected/9b70daf6-34a1-4de4-98ad-ec2708b1c139-kube-api-access-c446v\") on node \"crc\" DevicePath \"\""
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.791215 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsnc2"]
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.797653 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsnc2"]
Dec 03 17:37:36 crc kubenswrapper[5002]: I1203 17:37:36.855383 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" path="/var/lib/kubelet/pods/9b70daf6-34a1-4de4-98ad-ec2708b1c139/volumes"
Dec 03 17:37:41 crc kubenswrapper[5002]: I1203 17:37:41.841100 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:37:41 crc kubenswrapper[5002]: E1203 17:37:41.841817 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:37:52 crc kubenswrapper[5002]: I1203 17:37:52.840963 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:37:52 crc kubenswrapper[5002]: E1203 17:37:52.842075 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:38:07 crc kubenswrapper[5002]: I1203 17:38:07.840088 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:38:07 crc kubenswrapper[5002]: E1203 17:38:07.841095 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:38:22 crc kubenswrapper[5002]: I1203 17:38:22.841090 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:38:22 crc kubenswrapper[5002]: E1203 17:38:22.842139 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:38:35 crc kubenswrapper[5002]: I1203 17:38:35.841103 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:38:35 crc kubenswrapper[5002]: E1203 17:38:35.842351 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:38:49 crc kubenswrapper[5002]: I1203 17:38:49.840410 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:38:49 crc kubenswrapper[5002]: E1203 17:38:49.841493 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:39:02 crc kubenswrapper[5002]: I1203 17:39:02.840828 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:39:02 crc kubenswrapper[5002]: E1203 17:39:02.841784 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:39:16 crc kubenswrapper[5002]: I1203 17:39:16.844624 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071"
Dec 03 17:39:16 crc kubenswrapper[5002]: E1203 17:39:16.845467 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.043152 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4qxb9"]
Dec 03 17:39:25 crc kubenswrapper[5002]: E1203 17:39:25.044382 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="registry-server"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.044419 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="registry-server"
Dec 03 17:39:25 crc kubenswrapper[5002]: E1203 17:39:25.044444 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="extract-content"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.044462 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="extract-content"
Dec 03 17:39:25 crc kubenswrapper[5002]: E1203 17:39:25.044530 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="extract-utilities"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.044553 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="extract-utilities"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.044967 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b70daf6-34a1-4de4-98ad-ec2708b1c139" containerName="registry-server"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.047347 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.066110 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qxb9"]
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.093468 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-catalog-content\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.093894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-utilities\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.093952 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqzrt\" (UniqueName: \"kubernetes.io/projected/e166f340-ef21-4c2e-9fc2-c8862c92d667-kube-api-access-vqzrt\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.195298 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-catalog-content\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.195368 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-utilities\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.195421 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqzrt\" (UniqueName: \"kubernetes.io/projected/e166f340-ef21-4c2e-9fc2-c8862c92d667-kube-api-access-vqzrt\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.195798 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-catalog-content\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.195993 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-utilities\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.213628 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqzrt\" (UniqueName: \"kubernetes.io/projected/e166f340-ef21-4c2e-9fc2-c8862c92d667-kube-api-access-vqzrt\") pod \"certified-operators-4qxb9\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.244089 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-45588"]
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.246284 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.261599 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45588"]
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.296435 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-catalog-content\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.296519 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-utilities\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.296686 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-splzz\" (UniqueName: \"kubernetes.io/projected/022660f8-6bda-4fbd-a5a0-2452709a5796-kube-api-access-splzz\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.385725 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qxb9"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.398691 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-utilities\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.398796 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-splzz\" (UniqueName: \"kubernetes.io/projected/022660f8-6bda-4fbd-a5a0-2452709a5796-kube-api-access-splzz\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.398840 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-catalog-content\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.399332 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-utilities\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588"
Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.399371 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName:
\"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.418195 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-splzz\" (UniqueName: \"kubernetes.io/projected/022660f8-6bda-4fbd-a5a0-2452709a5796-kube-api-access-splzz\") pod \"community-operators-45588\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.575101 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:25 crc kubenswrapper[5002]: I1203 17:39:25.698653 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qxb9"] Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.075265 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45588"] Dec 03 17:39:26 crc kubenswrapper[5002]: W1203 17:39:26.086352 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod022660f8_6bda_4fbd_a5a0_2452709a5796.slice/crio-1f6884d5d0fe7edd9a93ae6d486669e2c19e8ed4bc752b625a27dc68c578d85a WatchSource:0}: Error finding container 1f6884d5d0fe7edd9a93ae6d486669e2c19e8ed4bc752b625a27dc68c578d85a: Status 404 returned error can't find the container with id 1f6884d5d0fe7edd9a93ae6d486669e2c19e8ed4bc752b625a27dc68c578d85a Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.431901 5002 generic.go:334] "Generic (PLEG): container finished" podID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerID="82134cfa1027e8078f58095a13d1f206e85aec20bce773ae4c5837c39341d27a" exitCode=0 Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.432111 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerDied","Data":"82134cfa1027e8078f58095a13d1f206e85aec20bce773ae4c5837c39341d27a"} Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.432393 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerStarted","Data":"1f6884d5d0fe7edd9a93ae6d486669e2c19e8ed4bc752b625a27dc68c578d85a"} Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.436802 5002 generic.go:334] "Generic (PLEG): container finished" podID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerID="381552b8441838aca34ae63301507ada7855614f79fe99d817390cd61db9dc56" exitCode=0 Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.436850 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerDied","Data":"381552b8441838aca34ae63301507ada7855614f79fe99d817390cd61db9dc56"} Dec 03 17:39:26 crc kubenswrapper[5002]: I1203 17:39:26.436878 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerStarted","Data":"ba4adbcdcc51d5e202b42c54542ee00af8655337a43c9db7b5532d0c289e6a12"} Dec 03 17:39:27 crc kubenswrapper[5002]: I1203 17:39:27.445927 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" 
event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerStarted","Data":"d5ed114cfa5ad190428486a0ffb8408fe6b26e44c5996fa566b1c9b10f7a7ad7"} Dec 03 17:39:27 crc kubenswrapper[5002]: I1203 17:39:27.447737 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerStarted","Data":"c2013f6f7f51ee5ce8cc6245b77bd816c64094ea334d4e578dfe02c3eb66ada4"} Dec 03 17:39:27 crc kubenswrapper[5002]: I1203 17:39:27.841387 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:39:27 crc kubenswrapper[5002]: E1203 17:39:27.841550 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:39:28 crc kubenswrapper[5002]: I1203 17:39:28.461105 5002 generic.go:334] "Generic (PLEG): container finished" podID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerID="c2013f6f7f51ee5ce8cc6245b77bd816c64094ea334d4e578dfe02c3eb66ada4" exitCode=0 Dec 03 17:39:28 crc kubenswrapper[5002]: I1203 17:39:28.461242 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerDied","Data":"c2013f6f7f51ee5ce8cc6245b77bd816c64094ea334d4e578dfe02c3eb66ada4"} Dec 03 17:39:28 crc kubenswrapper[5002]: I1203 17:39:28.466910 5002 generic.go:334] "Generic (PLEG): container finished" podID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerID="d5ed114cfa5ad190428486a0ffb8408fe6b26e44c5996fa566b1c9b10f7a7ad7" exitCode=0 Dec 03 17:39:28 crc kubenswrapper[5002]: I1203 17:39:28.466972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerDied","Data":"d5ed114cfa5ad190428486a0ffb8408fe6b26e44c5996fa566b1c9b10f7a7ad7"} Dec 03 17:39:29 crc kubenswrapper[5002]: I1203 17:39:29.476466 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerStarted","Data":"c5f56644113e249ca1d71b1d4b81cac03dbfbdbb9f016e008dfdfb92613b2178"} Dec 03 17:39:29 crc kubenswrapper[5002]: I1203 17:39:29.479354 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerStarted","Data":"cc48f386041d06f23d0177c94fdb3271c80c9908ea6d5da652eb12a702b95f45"} Dec 03 17:39:29 crc kubenswrapper[5002]: I1203 17:39:29.526969 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-45588" podStartSLOduration=2.07755226 podStartE2EDuration="4.526951828s" podCreationTimestamp="2025-12-03 17:39:25 +0000 UTC" firstStartedPulling="2025-12-03 17:39:26.434494962 +0000 UTC m=+4089.848316890" lastFinishedPulling="2025-12-03 17:39:28.88389457 +0000 UTC m=+4092.297716458" observedRunningTime="2025-12-03 17:39:29.496017323 +0000 UTC m=+4092.909839211" watchObservedRunningTime="2025-12-03 17:39:29.526951828 
+0000 UTC m=+4092.940773716" Dec 03 17:39:29 crc kubenswrapper[5002]: I1203 17:39:29.529290 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4qxb9" podStartSLOduration=2.048346652 podStartE2EDuration="4.529275891s" podCreationTimestamp="2025-12-03 17:39:25 +0000 UTC" firstStartedPulling="2025-12-03 17:39:26.439324802 +0000 UTC m=+4089.853146690" lastFinishedPulling="2025-12-03 17:39:28.920254041 +0000 UTC m=+4092.334075929" observedRunningTime="2025-12-03 17:39:29.523372463 +0000 UTC m=+4092.937194351" watchObservedRunningTime="2025-12-03 17:39:29.529275891 +0000 UTC m=+4092.943097789" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.386404 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4qxb9" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.387289 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4qxb9" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.457163 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4qxb9" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.576448 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.576515 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.590128 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4qxb9" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.625616 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:35 crc kubenswrapper[5002]: I1203 17:39:35.705317 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qxb9"] Dec 03 17:39:36 crc kubenswrapper[5002]: I1203 17:39:36.604953 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:37 crc kubenswrapper[5002]: I1203 17:39:37.548135 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4qxb9" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="registry-server" containerID="cri-o://cc48f386041d06f23d0177c94fdb3271c80c9908ea6d5da652eb12a702b95f45" gracePeriod=2 Dec 03 17:39:37 crc kubenswrapper[5002]: I1203 17:39:37.902028 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45588"] Dec 03 17:39:38 crc kubenswrapper[5002]: I1203 17:39:38.558297 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-45588" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="registry-server" containerID="cri-o://c5f56644113e249ca1d71b1d4b81cac03dbfbdbb9f016e008dfdfb92613b2178" gracePeriod=2 Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.569378 5002 generic.go:334] "Generic (PLEG): container finished" podID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerID="c5f56644113e249ca1d71b1d4b81cac03dbfbdbb9f016e008dfdfb92613b2178" exitCode=0 Dec 03 17:39:39 crc 
kubenswrapper[5002]: I1203 17:39:39.569481 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerDied","Data":"c5f56644113e249ca1d71b1d4b81cac03dbfbdbb9f016e008dfdfb92613b2178"} Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.573506 5002 generic.go:334] "Generic (PLEG): container finished" podID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerID="cc48f386041d06f23d0177c94fdb3271c80c9908ea6d5da652eb12a702b95f45" exitCode=0 Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.573570 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerDied","Data":"cc48f386041d06f23d0177c94fdb3271c80c9908ea6d5da652eb12a702b95f45"} Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.779293 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.846485 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qxb9" Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.918257 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-catalog-content\") pod \"022660f8-6bda-4fbd-a5a0-2452709a5796\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.918384 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-utilities\") pod \"022660f8-6bda-4fbd-a5a0-2452709a5796\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.918441 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-splzz\" (UniqueName: \"kubernetes.io/projected/022660f8-6bda-4fbd-a5a0-2452709a5796-kube-api-access-splzz\") pod \"022660f8-6bda-4fbd-a5a0-2452709a5796\" (UID: \"022660f8-6bda-4fbd-a5a0-2452709a5796\") " Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.919252 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-utilities" (OuterVolumeSpecName: "utilities") pod "022660f8-6bda-4fbd-a5a0-2452709a5796" (UID: "022660f8-6bda-4fbd-a5a0-2452709a5796"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.923392 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/022660f8-6bda-4fbd-a5a0-2452709a5796-kube-api-access-splzz" (OuterVolumeSpecName: "kube-api-access-splzz") pod "022660f8-6bda-4fbd-a5a0-2452709a5796" (UID: "022660f8-6bda-4fbd-a5a0-2452709a5796"). InnerVolumeSpecName "kube-api-access-splzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:39:39 crc kubenswrapper[5002]: I1203 17:39:39.963280 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "022660f8-6bda-4fbd-a5a0-2452709a5796" (UID: "022660f8-6bda-4fbd-a5a0-2452709a5796"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.019793 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-utilities\") pod \"e166f340-ef21-4c2e-9fc2-c8862c92d667\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.019964 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqzrt\" (UniqueName: \"kubernetes.io/projected/e166f340-ef21-4c2e-9fc2-c8862c92d667-kube-api-access-vqzrt\") pod \"e166f340-ef21-4c2e-9fc2-c8862c92d667\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.020037 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-catalog-content\") pod \"e166f340-ef21-4c2e-9fc2-c8862c92d667\" (UID: \"e166f340-ef21-4c2e-9fc2-c8862c92d667\") " Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.020719 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-utilities" (OuterVolumeSpecName: "utilities") pod "e166f340-ef21-4c2e-9fc2-c8862c92d667" (UID: "e166f340-ef21-4c2e-9fc2-c8862c92d667"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.021177 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.021212 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.021227 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-splzz\" (UniqueName: \"kubernetes.io/projected/022660f8-6bda-4fbd-a5a0-2452709a5796-kube-api-access-splzz\") on node \"crc\" DevicePath \"\"" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.021240 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/022660f8-6bda-4fbd-a5a0-2452709a5796-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.023591 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e166f340-ef21-4c2e-9fc2-c8862c92d667-kube-api-access-vqzrt" (OuterVolumeSpecName: "kube-api-access-vqzrt") pod "e166f340-ef21-4c2e-9fc2-c8862c92d667" (UID: "e166f340-ef21-4c2e-9fc2-c8862c92d667"). InnerVolumeSpecName "kube-api-access-vqzrt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.072583 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e166f340-ef21-4c2e-9fc2-c8862c92d667" (UID: "e166f340-ef21-4c2e-9fc2-c8862c92d667"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.122499 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e166f340-ef21-4c2e-9fc2-c8862c92d667-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.122536 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqzrt\" (UniqueName: \"kubernetes.io/projected/e166f340-ef21-4c2e-9fc2-c8862c92d667-kube-api-access-vqzrt\") on node \"crc\" DevicePath \"\"" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.587443 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45588" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.587415 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45588" event={"ID":"022660f8-6bda-4fbd-a5a0-2452709a5796","Type":"ContainerDied","Data":"1f6884d5d0fe7edd9a93ae6d486669e2c19e8ed4bc752b625a27dc68c578d85a"} Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.587627 5002 scope.go:117] "RemoveContainer" containerID="c5f56644113e249ca1d71b1d4b81cac03dbfbdbb9f016e008dfdfb92613b2178" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.592096 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qxb9" event={"ID":"e166f340-ef21-4c2e-9fc2-c8862c92d667","Type":"ContainerDied","Data":"ba4adbcdcc51d5e202b42c54542ee00af8655337a43c9db7b5532d0c289e6a12"} Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.592344 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4qxb9" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.632042 5002 scope.go:117] "RemoveContainer" containerID="c2013f6f7f51ee5ce8cc6245b77bd816c64094ea334d4e578dfe02c3eb66ada4" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.637427 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qxb9"] Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.652334 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4qxb9"] Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.660940 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45588"] Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.668597 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-45588"] Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.679311 5002 scope.go:117] "RemoveContainer" containerID="82134cfa1027e8078f58095a13d1f206e85aec20bce773ae4c5837c39341d27a" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.698304 5002 scope.go:117] "RemoveContainer" containerID="cc48f386041d06f23d0177c94fdb3271c80c9908ea6d5da652eb12a702b95f45" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.730282 5002 scope.go:117] "RemoveContainer" containerID="d5ed114cfa5ad190428486a0ffb8408fe6b26e44c5996fa566b1c9b10f7a7ad7" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.754481 5002 scope.go:117] "RemoveContainer" containerID="381552b8441838aca34ae63301507ada7855614f79fe99d817390cd61db9dc56" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.852684 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" path="/var/lib/kubelet/pods/022660f8-6bda-4fbd-a5a0-2452709a5796/volumes" Dec 03 17:39:40 crc kubenswrapper[5002]: I1203 17:39:40.854321 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" path="/var/lib/kubelet/pods/e166f340-ef21-4c2e-9fc2-c8862c92d667/volumes" Dec 03 17:39:41 crc kubenswrapper[5002]: I1203 17:39:41.841159 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:39:41 crc kubenswrapper[5002]: E1203 17:39:41.841817 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:39:54 crc kubenswrapper[5002]: I1203 17:39:54.841139 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:39:54 crc kubenswrapper[5002]: E1203 17:39:54.842208 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:40:05 crc kubenswrapper[5002]: 
I1203 17:40:05.840088 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:40:05 crc kubenswrapper[5002]: E1203 17:40:05.840907 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:40:17 crc kubenswrapper[5002]: I1203 17:40:17.841189 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:40:17 crc kubenswrapper[5002]: E1203 17:40:17.842645 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:40:31 crc kubenswrapper[5002]: I1203 17:40:31.840561 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:40:32 crc kubenswrapper[5002]: I1203 17:40:32.022414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"98c44593aacea9e973d951622736b6dc67898054e86b410ba52f03e08d1845d1"} Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.830539 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pkjtp"] Dec 03 17:42:33 crc kubenswrapper[5002]: E1203 17:42:33.831981 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="extract-utilities" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832010 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="extract-utilities" Dec 03 17:42:33 crc kubenswrapper[5002]: E1203 17:42:33.832034 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="registry-server" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832050 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="registry-server" Dec 03 17:42:33 crc kubenswrapper[5002]: E1203 17:42:33.832083 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="extract-content" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832099 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="extract-content" Dec 03 17:42:33 crc kubenswrapper[5002]: E1203 17:42:33.832133 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="extract-utilities" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832149 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" 
containerName="extract-utilities" Dec 03 17:42:33 crc kubenswrapper[5002]: E1203 17:42:33.832184 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="registry-server" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832207 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="registry-server" Dec 03 17:42:33 crc kubenswrapper[5002]: E1203 17:42:33.832237 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="extract-content" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832254 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="extract-content" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832634 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e166f340-ef21-4c2e-9fc2-c8862c92d667" containerName="registry-server" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.832687 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="022660f8-6bda-4fbd-a5a0-2452709a5796" containerName="registry-server" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.836524 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.842328 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pkjtp"] Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.940558 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-utilities\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.940698 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-catalog-content\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:33 crc kubenswrapper[5002]: I1203 17:42:33.940739 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r6ng\" (UniqueName: \"kubernetes.io/projected/2303dfc5-cc17-43c5-8170-5bfa3374baa6-kube-api-access-9r6ng\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.042329 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-utilities\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.042835 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-utilities\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " 
pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.043197 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-catalog-content\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.043277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-catalog-content\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.043364 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r6ng\" (UniqueName: \"kubernetes.io/projected/2303dfc5-cc17-43c5-8170-5bfa3374baa6-kube-api-access-9r6ng\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.070219 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r6ng\" (UniqueName: \"kubernetes.io/projected/2303dfc5-cc17-43c5-8170-5bfa3374baa6-kube-api-access-9r6ng\") pod \"redhat-operators-pkjtp\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.173440 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:34 crc kubenswrapper[5002]: I1203 17:42:34.610618 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pkjtp"] Dec 03 17:42:35 crc kubenswrapper[5002]: I1203 17:42:35.023549 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerStarted","Data":"e22947c2a9a6d88b708c6002782d499860a04ca19788fd2a423b7b396fc05eee"} Dec 03 17:42:36 crc kubenswrapper[5002]: I1203 17:42:36.035503 5002 generic.go:334] "Generic (PLEG): container finished" podID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerID="10465c8393093d17f0bd2b464ce89485774883ca24d9db14be1d6eff8c202ce6" exitCode=0 Dec 03 17:42:36 crc kubenswrapper[5002]: I1203 17:42:36.035578 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerDied","Data":"10465c8393093d17f0bd2b464ce89485774883ca24d9db14be1d6eff8c202ce6"} Dec 03 17:42:36 crc kubenswrapper[5002]: I1203 17:42:36.038268 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:42:38 crc kubenswrapper[5002]: I1203 17:42:38.053238 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerStarted","Data":"8945ad89fc5b196516413bcf297c1afb26a97badc18967fba84553fb124a832f"} Dec 03 17:42:39 crc kubenswrapper[5002]: I1203 17:42:39.069898 5002 generic.go:334] "Generic (PLEG): container finished" podID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" 
containerID="8945ad89fc5b196516413bcf297c1afb26a97badc18967fba84553fb124a832f" exitCode=0 Dec 03 17:42:39 crc kubenswrapper[5002]: I1203 17:42:39.069938 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerDied","Data":"8945ad89fc5b196516413bcf297c1afb26a97badc18967fba84553fb124a832f"} Dec 03 17:42:48 crc kubenswrapper[5002]: I1203 17:42:48.136417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerStarted","Data":"63ed866f31bff6c9433aca711ac82650a78d246848c776bb04118eb3cb1073b2"} Dec 03 17:42:48 crc kubenswrapper[5002]: I1203 17:42:48.154883 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pkjtp" podStartSLOduration=3.932173408 podStartE2EDuration="15.154857113s" podCreationTimestamp="2025-12-03 17:42:33 +0000 UTC" firstStartedPulling="2025-12-03 17:42:36.038033559 +0000 UTC m=+4279.451855447" lastFinishedPulling="2025-12-03 17:42:47.260717264 +0000 UTC m=+4290.674539152" observedRunningTime="2025-12-03 17:42:48.150170256 +0000 UTC m=+4291.563992144" watchObservedRunningTime="2025-12-03 17:42:48.154857113 +0000 UTC m=+4291.568679001" Dec 03 17:42:50 crc kubenswrapper[5002]: I1203 17:42:50.917201 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:42:50 crc kubenswrapper[5002]: I1203 17:42:50.917557 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:42:54 crc kubenswrapper[5002]: I1203 17:42:54.174183 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:54 crc kubenswrapper[5002]: I1203 17:42:54.174465 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:54 crc kubenswrapper[5002]: I1203 17:42:54.222568 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:55 crc kubenswrapper[5002]: I1203 17:42:55.240302 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:42:55 crc kubenswrapper[5002]: I1203 17:42:55.285479 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pkjtp"] Dec 03 17:42:57 crc kubenswrapper[5002]: I1203 17:42:57.200552 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pkjtp" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="registry-server" containerID="cri-o://63ed866f31bff6c9433aca711ac82650a78d246848c776bb04118eb3cb1073b2" gracePeriod=2 Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.243871 5002 generic.go:334] "Generic (PLEG): container finished" 
podID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerID="63ed866f31bff6c9433aca711ac82650a78d246848c776bb04118eb3cb1073b2" exitCode=0 Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.243962 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerDied","Data":"63ed866f31bff6c9433aca711ac82650a78d246848c776bb04118eb3cb1073b2"} Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.758740 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.934088 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-catalog-content\") pod \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.934551 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r6ng\" (UniqueName: \"kubernetes.io/projected/2303dfc5-cc17-43c5-8170-5bfa3374baa6-kube-api-access-9r6ng\") pod \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.934600 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-utilities\") pod \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\" (UID: \"2303dfc5-cc17-43c5-8170-5bfa3374baa6\") " Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.935448 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-utilities" (OuterVolumeSpecName: "utilities") pod "2303dfc5-cc17-43c5-8170-5bfa3374baa6" (UID: "2303dfc5-cc17-43c5-8170-5bfa3374baa6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:43:01 crc kubenswrapper[5002]: I1203 17:43:01.943575 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2303dfc5-cc17-43c5-8170-5bfa3374baa6-kube-api-access-9r6ng" (OuterVolumeSpecName: "kube-api-access-9r6ng") pod "2303dfc5-cc17-43c5-8170-5bfa3374baa6" (UID: "2303dfc5-cc17-43c5-8170-5bfa3374baa6"). InnerVolumeSpecName "kube-api-access-9r6ng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.054734 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r6ng\" (UniqueName: \"kubernetes.io/projected/2303dfc5-cc17-43c5-8170-5bfa3374baa6-kube-api-access-9r6ng\") on node \"crc\" DevicePath \"\"" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.054783 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.089905 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2303dfc5-cc17-43c5-8170-5bfa3374baa6" (UID: "2303dfc5-cc17-43c5-8170-5bfa3374baa6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.155276 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2303dfc5-cc17-43c5-8170-5bfa3374baa6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.257541 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkjtp" event={"ID":"2303dfc5-cc17-43c5-8170-5bfa3374baa6","Type":"ContainerDied","Data":"e22947c2a9a6d88b708c6002782d499860a04ca19788fd2a423b7b396fc05eee"} Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.257613 5002 scope.go:117] "RemoveContainer" containerID="63ed866f31bff6c9433aca711ac82650a78d246848c776bb04118eb3cb1073b2" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.257636 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pkjtp" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.285139 5002 scope.go:117] "RemoveContainer" containerID="8945ad89fc5b196516413bcf297c1afb26a97badc18967fba84553fb124a832f" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.305695 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pkjtp"] Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.312285 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pkjtp"] Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.318213 5002 scope.go:117] "RemoveContainer" containerID="10465c8393093d17f0bd2b464ce89485774883ca24d9db14be1d6eff8c202ce6" Dec 03 17:43:02 crc kubenswrapper[5002]: I1203 17:43:02.853340 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" path="/var/lib/kubelet/pods/2303dfc5-cc17-43c5-8170-5bfa3374baa6/volumes" Dec 03 17:43:20 crc kubenswrapper[5002]: I1203 17:43:20.916538 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:43:20 crc kubenswrapper[5002]: I1203 17:43:20.917572 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:43:50 crc kubenswrapper[5002]: I1203 17:43:50.916407 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:43:50 crc kubenswrapper[5002]: I1203 17:43:50.917072 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:43:50 crc kubenswrapper[5002]: I1203 17:43:50.917122 5002 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 17:43:50 crc kubenswrapper[5002]: I1203 17:43:50.917856 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"98c44593aacea9e973d951622736b6dc67898054e86b410ba52f03e08d1845d1"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:43:50 crc kubenswrapper[5002]: I1203 17:43:50.917920 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://98c44593aacea9e973d951622736b6dc67898054e86b410ba52f03e08d1845d1" gracePeriod=600 Dec 03 17:43:51 crc kubenswrapper[5002]: I1203 17:43:51.645567 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="98c44593aacea9e973d951622736b6dc67898054e86b410ba52f03e08d1845d1" exitCode=0 Dec 03 17:43:51 crc kubenswrapper[5002]: I1203 17:43:51.645633 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"98c44593aacea9e973d951622736b6dc67898054e86b410ba52f03e08d1845d1"} Dec 03 17:43:51 crc kubenswrapper[5002]: I1203 17:43:51.645906 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc"} Dec 03 17:43:51 crc kubenswrapper[5002]: I1203 17:43:51.645924 5002 scope.go:117] "RemoveContainer" containerID="c5abff3abb8a34f533d766a6635d2887ceda0876e26371d5fc0bf027a8a5f071" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.183280 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6"] Dec 03 17:45:00 crc kubenswrapper[5002]: E1203 17:45:00.184228 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="extract-content" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.184247 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="extract-content" Dec 03 17:45:00 crc kubenswrapper[5002]: E1203 17:45:00.184261 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="registry-server" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.184270 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="registry-server" Dec 03 17:45:00 crc kubenswrapper[5002]: E1203 17:45:00.184289 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="extract-utilities" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.184298 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="extract-utilities" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.184470 5002 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="2303dfc5-cc17-43c5-8170-5bfa3374baa6" containerName="registry-server" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.185045 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.187060 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.192358 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.203333 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6"] Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.285268 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a5ec183-be05-4b60-8c77-18d67398bc24-config-volume\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.285436 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a5ec183-be05-4b60-8c77-18d67398bc24-secret-volume\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.285525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm7rr\" (UniqueName: \"kubernetes.io/projected/4a5ec183-be05-4b60-8c77-18d67398bc24-kube-api-access-sm7rr\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.386233 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a5ec183-be05-4b60-8c77-18d67398bc24-config-volume\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.386313 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a5ec183-be05-4b60-8c77-18d67398bc24-secret-volume\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.386359 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm7rr\" (UniqueName: \"kubernetes.io/projected/4a5ec183-be05-4b60-8c77-18d67398bc24-kube-api-access-sm7rr\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 
17:45:00.387339 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a5ec183-be05-4b60-8c77-18d67398bc24-config-volume\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.440146 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a5ec183-be05-4b60-8c77-18d67398bc24-secret-volume\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.440449 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm7rr\" (UniqueName: \"kubernetes.io/projected/4a5ec183-be05-4b60-8c77-18d67398bc24-kube-api-access-sm7rr\") pod \"collect-profiles-29413065-55km6\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.506772 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:00 crc kubenswrapper[5002]: I1203 17:45:00.728499 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6"] Dec 03 17:45:01 crc kubenswrapper[5002]: I1203 17:45:01.165961 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" event={"ID":"4a5ec183-be05-4b60-8c77-18d67398bc24","Type":"ContainerStarted","Data":"d37f423569a59d69e405c6d9d76f046494a6a26c4ff1e1b7261afb8716ab39c9"} Dec 03 17:45:01 crc kubenswrapper[5002]: I1203 17:45:01.166246 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" event={"ID":"4a5ec183-be05-4b60-8c77-18d67398bc24","Type":"ContainerStarted","Data":"e2d64e72754fd9565a10f713cbcb745186ad843df9bcca2af404da038aa77866"} Dec 03 17:45:01 crc kubenswrapper[5002]: I1203 17:45:01.181921 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" podStartSLOduration=1.181903198 podStartE2EDuration="1.181903198s" podCreationTimestamp="2025-12-03 17:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:45:01.179820702 +0000 UTC m=+4424.593642590" watchObservedRunningTime="2025-12-03 17:45:01.181903198 +0000 UTC m=+4424.595725086" Dec 03 17:45:02 crc kubenswrapper[5002]: I1203 17:45:02.173862 5002 generic.go:334] "Generic (PLEG): container finished" podID="4a5ec183-be05-4b60-8c77-18d67398bc24" containerID="d37f423569a59d69e405c6d9d76f046494a6a26c4ff1e1b7261afb8716ab39c9" exitCode=0 Dec 03 17:45:02 crc kubenswrapper[5002]: I1203 17:45:02.173909 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" event={"ID":"4a5ec183-be05-4b60-8c77-18d67398bc24","Type":"ContainerDied","Data":"d37f423569a59d69e405c6d9d76f046494a6a26c4ff1e1b7261afb8716ab39c9"} Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 
17:45:03.474638 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.528309 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a5ec183-be05-4b60-8c77-18d67398bc24-secret-volume\") pod \"4a5ec183-be05-4b60-8c77-18d67398bc24\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.528363 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm7rr\" (UniqueName: \"kubernetes.io/projected/4a5ec183-be05-4b60-8c77-18d67398bc24-kube-api-access-sm7rr\") pod \"4a5ec183-be05-4b60-8c77-18d67398bc24\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.528482 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a5ec183-be05-4b60-8c77-18d67398bc24-config-volume\") pod \"4a5ec183-be05-4b60-8c77-18d67398bc24\" (UID: \"4a5ec183-be05-4b60-8c77-18d67398bc24\") " Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.530541 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a5ec183-be05-4b60-8c77-18d67398bc24-config-volume" (OuterVolumeSpecName: "config-volume") pod "4a5ec183-be05-4b60-8c77-18d67398bc24" (UID: "4a5ec183-be05-4b60-8c77-18d67398bc24"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.534953 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a5ec183-be05-4b60-8c77-18d67398bc24-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4a5ec183-be05-4b60-8c77-18d67398bc24" (UID: "4a5ec183-be05-4b60-8c77-18d67398bc24"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.536996 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a5ec183-be05-4b60-8c77-18d67398bc24-kube-api-access-sm7rr" (OuterVolumeSpecName: "kube-api-access-sm7rr") pod "4a5ec183-be05-4b60-8c77-18d67398bc24" (UID: "4a5ec183-be05-4b60-8c77-18d67398bc24"). InnerVolumeSpecName "kube-api-access-sm7rr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.629529 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a5ec183-be05-4b60-8c77-18d67398bc24-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.629578 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm7rr\" (UniqueName: \"kubernetes.io/projected/4a5ec183-be05-4b60-8c77-18d67398bc24-kube-api-access-sm7rr\") on node \"crc\" DevicePath \"\"" Dec 03 17:45:03 crc kubenswrapper[5002]: I1203 17:45:03.629623 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a5ec183-be05-4b60-8c77-18d67398bc24-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:45:04 crc kubenswrapper[5002]: I1203 17:45:04.188054 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" event={"ID":"4a5ec183-be05-4b60-8c77-18d67398bc24","Type":"ContainerDied","Data":"e2d64e72754fd9565a10f713cbcb745186ad843df9bcca2af404da038aa77866"} Dec 03 17:45:04 crc kubenswrapper[5002]: I1203 17:45:04.188092 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2d64e72754fd9565a10f713cbcb745186ad843df9bcca2af404da038aa77866" Dec 03 17:45:04 crc kubenswrapper[5002]: I1203 17:45:04.188107 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6" Dec 03 17:45:04 crc kubenswrapper[5002]: I1203 17:45:04.258453 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl"] Dec 03 17:45:04 crc kubenswrapper[5002]: I1203 17:45:04.264442 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-5jzjl"] Dec 03 17:45:04 crc kubenswrapper[5002]: I1203 17:45:04.849951 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9" path="/var/lib/kubelet/pods/9cb54a7e-8f20-4ea7-8ecc-83ec589a2cd9/volumes" Dec 03 17:45:40 crc kubenswrapper[5002]: I1203 17:45:40.758479 5002 scope.go:117] "RemoveContainer" containerID="f993cadd821fb3a410a133b7617f5f1ee7d7a3f9f06159ef576da405cac3d017" Dec 03 17:46:20 crc kubenswrapper[5002]: I1203 17:46:20.916298 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:46:20 crc kubenswrapper[5002]: I1203 17:46:20.917184 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:46:50 crc kubenswrapper[5002]: I1203 17:46:50.916606 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 03 17:46:50 crc kubenswrapper[5002]: I1203 17:46:50.917305 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:47:20 crc kubenswrapper[5002]: I1203 17:47:20.916660 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:47:20 crc kubenswrapper[5002]: I1203 17:47:20.917351 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:47:20 crc kubenswrapper[5002]: I1203 17:47:20.917428 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 17:47:20 crc kubenswrapper[5002]: I1203 17:47:20.918732 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:47:20 crc kubenswrapper[5002]: I1203 17:47:20.918882 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" gracePeriod=600 Dec 03 17:47:21 crc kubenswrapper[5002]: E1203 17:47:21.546377 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:47:22 crc kubenswrapper[5002]: I1203 17:47:22.301702 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" exitCode=0 Dec 03 17:47:22 crc kubenswrapper[5002]: I1203 17:47:22.301772 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc"} Dec 03 17:47:22 crc kubenswrapper[5002]: I1203 17:47:22.301806 5002 scope.go:117] "RemoveContainer" containerID="98c44593aacea9e973d951622736b6dc67898054e86b410ba52f03e08d1845d1" Dec 03 17:47:22 crc kubenswrapper[5002]: I1203 17:47:22.302486 5002 scope.go:117] "RemoveContainer" 
containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:47:22 crc kubenswrapper[5002]: E1203 17:47:22.302957 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:47:33 crc kubenswrapper[5002]: I1203 17:47:33.841321 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:47:33 crc kubenswrapper[5002]: E1203 17:47:33.842056 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:47:44 crc kubenswrapper[5002]: I1203 17:47:44.840400 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:47:44 crc kubenswrapper[5002]: E1203 17:47:44.842544 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:47:55 crc kubenswrapper[5002]: I1203 17:47:55.841057 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:47:55 crc kubenswrapper[5002]: E1203 17:47:55.842093 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:48:09 crc kubenswrapper[5002]: I1203 17:48:09.840850 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:48:09 crc kubenswrapper[5002]: E1203 17:48:09.841619 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:48:24 crc kubenswrapper[5002]: I1203 17:48:24.841455 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:48:24 crc kubenswrapper[5002]: E1203 17:48:24.843458 5002 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.732671 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qm66f"] Dec 03 17:48:29 crc kubenswrapper[5002]: E1203 17:48:29.733478 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a5ec183-be05-4b60-8c77-18d67398bc24" containerName="collect-profiles" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.733490 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a5ec183-be05-4b60-8c77-18d67398bc24" containerName="collect-profiles" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.733652 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a5ec183-be05-4b60-8c77-18d67398bc24" containerName="collect-profiles" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.734657 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.750810 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm66f"] Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.893588 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-catalog-content\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.893667 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-utilities\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.893694 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6q9b\" (UniqueName: \"kubernetes.io/projected/d91d6bc0-825e-406e-bac3-072f061839d9-kube-api-access-j6q9b\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.995013 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-utilities\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.995088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6q9b\" (UniqueName: \"kubernetes.io/projected/d91d6bc0-825e-406e-bac3-072f061839d9-kube-api-access-j6q9b\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" 
Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.995251 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-catalog-content\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.995700 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-utilities\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:29 crc kubenswrapper[5002]: I1203 17:48:29.995809 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-catalog-content\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.016528 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6q9b\" (UniqueName: \"kubernetes.io/projected/d91d6bc0-825e-406e-bac3-072f061839d9-kube-api-access-j6q9b\") pod \"redhat-marketplace-qm66f\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.056271 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.523083 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm66f"] Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.822843 5002 generic.go:334] "Generic (PLEG): container finished" podID="d91d6bc0-825e-406e-bac3-072f061839d9" containerID="5380bf9f1c63654e34ba533ad7ce0f841a74242ad39452447dfad3cd848b6447" exitCode=0 Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.822887 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm66f" event={"ID":"d91d6bc0-825e-406e-bac3-072f061839d9","Type":"ContainerDied","Data":"5380bf9f1c63654e34ba533ad7ce0f841a74242ad39452447dfad3cd848b6447"} Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.822916 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm66f" event={"ID":"d91d6bc0-825e-406e-bac3-072f061839d9","Type":"ContainerStarted","Data":"b906eced0dc810599294486b39f7b89d5436ac532391a5195bd2241073fc9a14"} Dec 03 17:48:30 crc kubenswrapper[5002]: I1203 17:48:30.825091 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:48:32 crc kubenswrapper[5002]: I1203 17:48:32.842743 5002 generic.go:334] "Generic (PLEG): container finished" podID="d91d6bc0-825e-406e-bac3-072f061839d9" containerID="3e82a2e20ff3516eb22db5763ba54bd71b3da52af2ca63a6e327e4ade793c408" exitCode=0 Dec 03 17:48:32 crc kubenswrapper[5002]: I1203 17:48:32.863514 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm66f" 
event={"ID":"d91d6bc0-825e-406e-bac3-072f061839d9","Type":"ContainerDied","Data":"3e82a2e20ff3516eb22db5763ba54bd71b3da52af2ca63a6e327e4ade793c408"} Dec 03 17:48:33 crc kubenswrapper[5002]: I1203 17:48:33.851238 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm66f" event={"ID":"d91d6bc0-825e-406e-bac3-072f061839d9","Type":"ContainerStarted","Data":"ced435eb2af112a78501e76957e421969a738b812eb2037c20beb1df89522f24"} Dec 03 17:48:33 crc kubenswrapper[5002]: I1203 17:48:33.874325 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qm66f" podStartSLOduration=2.294644721 podStartE2EDuration="4.874305768s" podCreationTimestamp="2025-12-03 17:48:29 +0000 UTC" firstStartedPulling="2025-12-03 17:48:30.824893576 +0000 UTC m=+4634.238715464" lastFinishedPulling="2025-12-03 17:48:33.404554623 +0000 UTC m=+4636.818376511" observedRunningTime="2025-12-03 17:48:33.871396889 +0000 UTC m=+4637.285218777" watchObservedRunningTime="2025-12-03 17:48:33.874305768 +0000 UTC m=+4637.288127656" Dec 03 17:48:35 crc kubenswrapper[5002]: I1203 17:48:35.841027 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:48:35 crc kubenswrapper[5002]: E1203 17:48:35.841297 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:48:40 crc kubenswrapper[5002]: I1203 17:48:40.056853 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:40 crc kubenswrapper[5002]: I1203 17:48:40.057466 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:40 crc kubenswrapper[5002]: I1203 17:48:40.284313 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:40 crc kubenswrapper[5002]: I1203 17:48:40.945958 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:40 crc kubenswrapper[5002]: I1203 17:48:40.993052 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm66f"] Dec 03 17:48:42 crc kubenswrapper[5002]: I1203 17:48:42.919236 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qm66f" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="registry-server" containerID="cri-o://ced435eb2af112a78501e76957e421969a738b812eb2037c20beb1df89522f24" gracePeriod=2 Dec 03 17:48:43 crc kubenswrapper[5002]: I1203 17:48:43.930362 5002 generic.go:334] "Generic (PLEG): container finished" podID="d91d6bc0-825e-406e-bac3-072f061839d9" containerID="ced435eb2af112a78501e76957e421969a738b812eb2037c20beb1df89522f24" exitCode=0 Dec 03 17:48:43 crc kubenswrapper[5002]: I1203 17:48:43.930450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm66f" 
event={"ID":"d91d6bc0-825e-406e-bac3-072f061839d9","Type":"ContainerDied","Data":"ced435eb2af112a78501e76957e421969a738b812eb2037c20beb1df89522f24"} Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.568072 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.715842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6q9b\" (UniqueName: \"kubernetes.io/projected/d91d6bc0-825e-406e-bac3-072f061839d9-kube-api-access-j6q9b\") pod \"d91d6bc0-825e-406e-bac3-072f061839d9\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.716120 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-utilities\") pod \"d91d6bc0-825e-406e-bac3-072f061839d9\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.716151 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-catalog-content\") pod \"d91d6bc0-825e-406e-bac3-072f061839d9\" (UID: \"d91d6bc0-825e-406e-bac3-072f061839d9\") " Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.717396 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-utilities" (OuterVolumeSpecName: "utilities") pod "d91d6bc0-825e-406e-bac3-072f061839d9" (UID: "d91d6bc0-825e-406e-bac3-072f061839d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.723910 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91d6bc0-825e-406e-bac3-072f061839d9-kube-api-access-j6q9b" (OuterVolumeSpecName: "kube-api-access-j6q9b") pod "d91d6bc0-825e-406e-bac3-072f061839d9" (UID: "d91d6bc0-825e-406e-bac3-072f061839d9"). InnerVolumeSpecName "kube-api-access-j6q9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.736422 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d91d6bc0-825e-406e-bac3-072f061839d9" (UID: "d91d6bc0-825e-406e-bac3-072f061839d9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.818436 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6q9b\" (UniqueName: \"kubernetes.io/projected/d91d6bc0-825e-406e-bac3-072f061839d9-kube-api-access-j6q9b\") on node \"crc\" DevicePath \"\"" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.818497 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.818521 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d91d6bc0-825e-406e-bac3-072f061839d9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.941982 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm66f" event={"ID":"d91d6bc0-825e-406e-bac3-072f061839d9","Type":"ContainerDied","Data":"b906eced0dc810599294486b39f7b89d5436ac532391a5195bd2241073fc9a14"} Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.942086 5002 scope.go:117] "RemoveContainer" containerID="ced435eb2af112a78501e76957e421969a738b812eb2037c20beb1df89522f24" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.942129 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm66f" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.979254 5002 scope.go:117] "RemoveContainer" containerID="3e82a2e20ff3516eb22db5763ba54bd71b3da52af2ca63a6e327e4ade793c408" Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.981980 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm66f"] Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.991715 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm66f"] Dec 03 17:48:44 crc kubenswrapper[5002]: I1203 17:48:44.999945 5002 scope.go:117] "RemoveContainer" containerID="5380bf9f1c63654e34ba533ad7ce0f841a74242ad39452447dfad3cd848b6447" Dec 03 17:48:46 crc kubenswrapper[5002]: I1203 17:48:46.851622 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" path="/var/lib/kubelet/pods/d91d6bc0-825e-406e-bac3-072f061839d9/volumes" Dec 03 17:48:49 crc kubenswrapper[5002]: I1203 17:48:49.840101 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:48:49 crc kubenswrapper[5002]: E1203 17:48:49.842251 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:49:00 crc kubenswrapper[5002]: I1203 17:49:00.840060 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:49:00 crc kubenswrapper[5002]: E1203 17:49:00.840771 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:49:15 crc kubenswrapper[5002]: I1203 17:49:15.840273 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:49:15 crc kubenswrapper[5002]: E1203 17:49:15.841032 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:49:30 crc kubenswrapper[5002]: I1203 17:49:30.840136 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:49:30 crc kubenswrapper[5002]: E1203 17:49:30.841975 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:49:41 crc kubenswrapper[5002]: I1203 17:49:41.840254 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:49:41 crc kubenswrapper[5002]: E1203 17:49:41.840985 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:49:53 crc kubenswrapper[5002]: I1203 17:49:53.841588 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:49:53 crc kubenswrapper[5002]: E1203 17:49:53.842996 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:50:08 crc kubenswrapper[5002]: I1203 17:50:08.840457 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:50:08 crc kubenswrapper[5002]: E1203 17:50:08.841327 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:50:20 crc kubenswrapper[5002]: I1203 17:50:20.840472 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:50:20 crc kubenswrapper[5002]: E1203 17:50:20.841997 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:50:35 crc kubenswrapper[5002]: I1203 17:50:35.840289 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:50:35 crc kubenswrapper[5002]: E1203 17:50:35.841077 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:50:46 crc kubenswrapper[5002]: I1203 17:50:46.850604 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:50:46 crc kubenswrapper[5002]: E1203 17:50:46.851867 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:51:00 crc kubenswrapper[5002]: I1203 17:51:00.840270 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:51:00 crc kubenswrapper[5002]: E1203 17:51:00.841248 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:51:11 crc kubenswrapper[5002]: I1203 17:51:11.840627 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:51:11 crc kubenswrapper[5002]: E1203 17:51:11.841325 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:51:25 crc kubenswrapper[5002]: I1203 17:51:25.841378 5002 
scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:51:25 crc kubenswrapper[5002]: E1203 17:51:25.842822 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:51:40 crc kubenswrapper[5002]: I1203 17:51:40.840445 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:51:40 crc kubenswrapper[5002]: E1203 17:51:40.841321 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.310171 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nsvhb"] Dec 03 17:51:46 crc kubenswrapper[5002]: E1203 17:51:46.311117 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="registry-server" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.311135 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="registry-server" Dec 03 17:51:46 crc kubenswrapper[5002]: E1203 17:51:46.311153 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="extract-content" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.311161 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="extract-content" Dec 03 17:51:46 crc kubenswrapper[5002]: E1203 17:51:46.311202 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="extract-utilities" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.311212 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="extract-utilities" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.311439 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d6bc0-825e-406e-bac3-072f061839d9" containerName="registry-server" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.312984 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.346488 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nsvhb"] Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.373616 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj28d\" (UniqueName: \"kubernetes.io/projected/72946c74-261c-4354-8c93-9d59e162b38b-kube-api-access-tj28d\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.373836 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-catalog-content\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.373892 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-utilities\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.475450 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-catalog-content\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.475774 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-utilities\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.475887 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj28d\" (UniqueName: \"kubernetes.io/projected/72946c74-261c-4354-8c93-9d59e162b38b-kube-api-access-tj28d\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.476623 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-catalog-content\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.476982 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-utilities\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.497317 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tj28d\" (UniqueName: \"kubernetes.io/projected/72946c74-261c-4354-8c93-9d59e162b38b-kube-api-access-tj28d\") pod \"certified-operators-nsvhb\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:46 crc kubenswrapper[5002]: I1203 17:51:46.644123 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.137501 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nsvhb"] Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.339546 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerStarted","Data":"5a643921331feeb0b07b9c8bc8ccf23e5f8138eab6a748350a0a990f0ce7d59f"} Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.341470 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerStarted","Data":"a952988f5a5f942c636b0f822b5c13b05de48d3ce1a4850d627c3ce68c2854cb"} Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.701662 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zwvcz"] Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.704286 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.714372 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zwvcz"] Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.794851 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-catalog-content\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.794958 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjq8c\" (UniqueName: \"kubernetes.io/projected/d862499d-4ace-41c6-8bee-6977d5b9cffc-kube-api-access-bjq8c\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.795031 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-utilities\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.895495 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-catalog-content\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 
17:51:47.895599 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjq8c\" (UniqueName: \"kubernetes.io/projected/d862499d-4ace-41c6-8bee-6977d5b9cffc-kube-api-access-bjq8c\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.895666 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-utilities\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.896452 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-catalog-content\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.896706 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-utilities\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:47 crc kubenswrapper[5002]: I1203 17:51:47.924035 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjq8c\" (UniqueName: \"kubernetes.io/projected/d862499d-4ace-41c6-8bee-6977d5b9cffc-kube-api-access-bjq8c\") pod \"community-operators-zwvcz\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:48 crc kubenswrapper[5002]: I1203 17:51:48.044678 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:48 crc kubenswrapper[5002]: I1203 17:51:48.347780 5002 generic.go:334] "Generic (PLEG): container finished" podID="72946c74-261c-4354-8c93-9d59e162b38b" containerID="5a643921331feeb0b07b9c8bc8ccf23e5f8138eab6a748350a0a990f0ce7d59f" exitCode=0 Dec 03 17:51:48 crc kubenswrapper[5002]: I1203 17:51:48.347817 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerDied","Data":"5a643921331feeb0b07b9c8bc8ccf23e5f8138eab6a748350a0a990f0ce7d59f"} Dec 03 17:51:48 crc kubenswrapper[5002]: I1203 17:51:48.531072 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zwvcz"] Dec 03 17:51:48 crc kubenswrapper[5002]: W1203 17:51:48.535718 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd862499d_4ace_41c6_8bee_6977d5b9cffc.slice/crio-6e14b0d522afda086725c5c83d8b7297bff6de7320e9e39644f029e39d114341 WatchSource:0}: Error finding container 6e14b0d522afda086725c5c83d8b7297bff6de7320e9e39644f029e39d114341: Status 404 returned error can't find the container with id 6e14b0d522afda086725c5c83d8b7297bff6de7320e9e39644f029e39d114341 Dec 03 17:51:49 crc kubenswrapper[5002]: I1203 17:51:49.363859 5002 generic.go:334] "Generic (PLEG): container finished" podID="72946c74-261c-4354-8c93-9d59e162b38b" containerID="08b66ecff5f4175d1c088c99fad577fcfc7fa3c00da4c446f7bf87609803817f" exitCode=0 Dec 03 17:51:49 crc kubenswrapper[5002]: I1203 17:51:49.363927 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerDied","Data":"08b66ecff5f4175d1c088c99fad577fcfc7fa3c00da4c446f7bf87609803817f"} Dec 03 17:51:49 crc kubenswrapper[5002]: I1203 17:51:49.365381 5002 generic.go:334] "Generic (PLEG): container finished" podID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerID="12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea" exitCode=0 Dec 03 17:51:49 crc kubenswrapper[5002]: I1203 17:51:49.365400 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerDied","Data":"12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea"} Dec 03 17:51:49 crc kubenswrapper[5002]: I1203 17:51:49.365413 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerStarted","Data":"6e14b0d522afda086725c5c83d8b7297bff6de7320e9e39644f029e39d114341"} Dec 03 17:51:50 crc kubenswrapper[5002]: I1203 17:51:50.375121 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerStarted","Data":"3517d910dca0ac97eb3768a0ab57d569ffcd730e8f747f5d7356b84d2932d4e5"} Dec 03 17:51:50 crc kubenswrapper[5002]: I1203 17:51:50.377493 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerStarted","Data":"fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06"} Dec 03 17:51:50 crc kubenswrapper[5002]: I1203 
17:51:50.403385 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nsvhb" podStartSLOduration=1.9617125039999999 podStartE2EDuration="4.403364359s" podCreationTimestamp="2025-12-03 17:51:46 +0000 UTC" firstStartedPulling="2025-12-03 17:51:47.342284311 +0000 UTC m=+4830.756106209" lastFinishedPulling="2025-12-03 17:51:49.783936136 +0000 UTC m=+4833.197758064" observedRunningTime="2025-12-03 17:51:50.399307599 +0000 UTC m=+4833.813129497" watchObservedRunningTime="2025-12-03 17:51:50.403364359 +0000 UTC m=+4833.817186247" Dec 03 17:51:51 crc kubenswrapper[5002]: I1203 17:51:51.385230 5002 generic.go:334] "Generic (PLEG): container finished" podID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerID="fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06" exitCode=0 Dec 03 17:51:51 crc kubenswrapper[5002]: I1203 17:51:51.386489 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerDied","Data":"fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06"} Dec 03 17:51:51 crc kubenswrapper[5002]: I1203 17:51:51.840157 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:51:51 crc kubenswrapper[5002]: E1203 17:51:51.840605 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:51:52 crc kubenswrapper[5002]: I1203 17:51:52.394852 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerStarted","Data":"a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259"} Dec 03 17:51:52 crc kubenswrapper[5002]: I1203 17:51:52.419567 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zwvcz" podStartSLOduration=2.873530364 podStartE2EDuration="5.419543054s" podCreationTimestamp="2025-12-03 17:51:47 +0000 UTC" firstStartedPulling="2025-12-03 17:51:49.367234102 +0000 UTC m=+4832.781056030" lastFinishedPulling="2025-12-03 17:51:51.913246822 +0000 UTC m=+4835.327068720" observedRunningTime="2025-12-03 17:51:52.416960075 +0000 UTC m=+4835.830781983" watchObservedRunningTime="2025-12-03 17:51:52.419543054 +0000 UTC m=+4835.833364952" Dec 03 17:51:56 crc kubenswrapper[5002]: I1203 17:51:56.644904 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:56 crc kubenswrapper[5002]: I1203 17:51:56.645573 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:56 crc kubenswrapper[5002]: I1203 17:51:56.693140 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:57 crc kubenswrapper[5002]: I1203 17:51:57.471113 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:51:57 crc kubenswrapper[5002]: I1203 17:51:57.515391 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nsvhb"] Dec 03 17:51:58 crc kubenswrapper[5002]: I1203 17:51:58.044809 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:58 crc kubenswrapper[5002]: I1203 17:51:58.045814 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:58 crc kubenswrapper[5002]: I1203 17:51:58.084687 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:58 crc kubenswrapper[5002]: I1203 17:51:58.487672 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:51:59 crc kubenswrapper[5002]: I1203 17:51:59.325432 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zwvcz"] Dec 03 17:51:59 crc kubenswrapper[5002]: I1203 17:51:59.444307 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nsvhb" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="registry-server" containerID="cri-o://3517d910dca0ac97eb3768a0ab57d569ffcd730e8f747f5d7356b84d2932d4e5" gracePeriod=2 Dec 03 17:52:00 crc kubenswrapper[5002]: I1203 17:52:00.449869 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zwvcz" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="registry-server" containerID="cri-o://a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259" gracePeriod=2 Dec 03 17:52:01 crc kubenswrapper[5002]: I1203 17:52:01.995170 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.105568 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-catalog-content\") pod \"d862499d-4ace-41c6-8bee-6977d5b9cffc\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.105789 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-utilities\") pod \"d862499d-4ace-41c6-8bee-6977d5b9cffc\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.105903 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjq8c\" (UniqueName: \"kubernetes.io/projected/d862499d-4ace-41c6-8bee-6977d5b9cffc-kube-api-access-bjq8c\") pod \"d862499d-4ace-41c6-8bee-6977d5b9cffc\" (UID: \"d862499d-4ace-41c6-8bee-6977d5b9cffc\") " Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.106899 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-utilities" (OuterVolumeSpecName: "utilities") pod "d862499d-4ace-41c6-8bee-6977d5b9cffc" (UID: "d862499d-4ace-41c6-8bee-6977d5b9cffc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.111549 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d862499d-4ace-41c6-8bee-6977d5b9cffc-kube-api-access-bjq8c" (OuterVolumeSpecName: "kube-api-access-bjq8c") pod "d862499d-4ace-41c6-8bee-6977d5b9cffc" (UID: "d862499d-4ace-41c6-8bee-6977d5b9cffc"). InnerVolumeSpecName "kube-api-access-bjq8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.162188 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d862499d-4ace-41c6-8bee-6977d5b9cffc" (UID: "d862499d-4ace-41c6-8bee-6977d5b9cffc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.207767 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.207805 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d862499d-4ace-41c6-8bee-6977d5b9cffc-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.207817 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjq8c\" (UniqueName: \"kubernetes.io/projected/d862499d-4ace-41c6-8bee-6977d5b9cffc-kube-api-access-bjq8c\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.479656 5002 generic.go:334] "Generic (PLEG): container finished" podID="72946c74-261c-4354-8c93-9d59e162b38b" containerID="3517d910dca0ac97eb3768a0ab57d569ffcd730e8f747f5d7356b84d2932d4e5" exitCode=0 Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.479834 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerDied","Data":"3517d910dca0ac97eb3768a0ab57d569ffcd730e8f747f5d7356b84d2932d4e5"} Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.483214 5002 generic.go:334] "Generic (PLEG): container finished" podID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerID="a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259" exitCode=0 Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.483257 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerDied","Data":"a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259"} Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.483289 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zwvcz" event={"ID":"d862499d-4ace-41c6-8bee-6977d5b9cffc","Type":"ContainerDied","Data":"6e14b0d522afda086725c5c83d8b7297bff6de7320e9e39644f029e39d114341"} Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.483319 5002 scope.go:117] "RemoveContainer" containerID="a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.483474 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zwvcz" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.526640 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zwvcz"] Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.529278 5002 scope.go:117] "RemoveContainer" containerID="fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.536558 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zwvcz"] Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.591780 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.596226 5002 scope.go:117] "RemoveContainer" containerID="12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.618704 5002 scope.go:117] "RemoveContainer" containerID="a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259" Dec 03 17:52:02 crc kubenswrapper[5002]: E1203 17:52:02.620066 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259\": container with ID starting with a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259 not found: ID does not exist" containerID="a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.620107 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259"} err="failed to get container status \"a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259\": rpc error: code = NotFound desc = could not find container \"a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259\": container with ID starting with a8ff0aa99d49a0478dfe187915761a86d3e7097a1aedbedca487db1fb678f259 not found: ID does not exist" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.620135 5002 scope.go:117] "RemoveContainer" containerID="fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06" Dec 03 17:52:02 crc kubenswrapper[5002]: E1203 17:52:02.620516 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06\": container with ID starting with fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06 not found: ID does not exist" containerID="fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.620551 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06"} err="failed to get container status \"fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06\": rpc error: code = NotFound desc = could not find container \"fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06\": container with ID starting with fc493934dbf11ba0d65623e9ae127f0666b094d9240425e216fcff1e2b8cac06 not found: ID does not exist" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.620570 5002 scope.go:117] 
"RemoveContainer" containerID="12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea" Dec 03 17:52:02 crc kubenswrapper[5002]: E1203 17:52:02.620812 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea\": container with ID starting with 12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea not found: ID does not exist" containerID="12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.620846 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea"} err="failed to get container status \"12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea\": rpc error: code = NotFound desc = could not find container \"12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea\": container with ID starting with 12d01b65f6618c189daeb0865b4c8b8235e2e21dd982543ab0fa52e221d606ea not found: ID does not exist" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.718929 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-catalog-content\") pod \"72946c74-261c-4354-8c93-9d59e162b38b\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.718996 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tj28d\" (UniqueName: \"kubernetes.io/projected/72946c74-261c-4354-8c93-9d59e162b38b-kube-api-access-tj28d\") pod \"72946c74-261c-4354-8c93-9d59e162b38b\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.719064 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-utilities\") pod \"72946c74-261c-4354-8c93-9d59e162b38b\" (UID: \"72946c74-261c-4354-8c93-9d59e162b38b\") " Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.720368 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-utilities" (OuterVolumeSpecName: "utilities") pod "72946c74-261c-4354-8c93-9d59e162b38b" (UID: "72946c74-261c-4354-8c93-9d59e162b38b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.722065 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72946c74-261c-4354-8c93-9d59e162b38b-kube-api-access-tj28d" (OuterVolumeSpecName: "kube-api-access-tj28d") pod "72946c74-261c-4354-8c93-9d59e162b38b" (UID: "72946c74-261c-4354-8c93-9d59e162b38b"). InnerVolumeSpecName "kube-api-access-tj28d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.776372 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72946c74-261c-4354-8c93-9d59e162b38b" (UID: "72946c74-261c-4354-8c93-9d59e162b38b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.820948 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.820993 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72946c74-261c-4354-8c93-9d59e162b38b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.821008 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tj28d\" (UniqueName: \"kubernetes.io/projected/72946c74-261c-4354-8c93-9d59e162b38b-kube-api-access-tj28d\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:02 crc kubenswrapper[5002]: I1203 17:52:02.848669 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" path="/var/lib/kubelet/pods/d862499d-4ace-41c6-8bee-6977d5b9cffc/volumes" Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.502207 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsvhb" event={"ID":"72946c74-261c-4354-8c93-9d59e162b38b","Type":"ContainerDied","Data":"a952988f5a5f942c636b0f822b5c13b05de48d3ce1a4850d627c3ce68c2854cb"} Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.502303 5002 scope.go:117] "RemoveContainer" containerID="3517d910dca0ac97eb3768a0ab57d569ffcd730e8f747f5d7356b84d2932d4e5" Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.502639 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsvhb" Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.533666 5002 scope.go:117] "RemoveContainer" containerID="08b66ecff5f4175d1c088c99fad577fcfc7fa3c00da4c446f7bf87609803817f" Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.539649 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nsvhb"] Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.544908 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nsvhb"] Dec 03 17:52:03 crc kubenswrapper[5002]: I1203 17:52:03.555101 5002 scope.go:117] "RemoveContainer" containerID="5a643921331feeb0b07b9c8bc8ccf23e5f8138eab6a748350a0a990f0ce7d59f" Dec 03 17:52:04 crc kubenswrapper[5002]: I1203 17:52:04.840804 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:52:04 crc kubenswrapper[5002]: E1203 17:52:04.841156 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:52:04 crc kubenswrapper[5002]: I1203 17:52:04.857332 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72946c74-261c-4354-8c93-9d59e162b38b" path="/var/lib/kubelet/pods/72946c74-261c-4354-8c93-9d59e162b38b/volumes" Dec 03 17:52:15 crc kubenswrapper[5002]: I1203 17:52:15.841121 5002 scope.go:117] "RemoveContainer" 
containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:52:15 crc kubenswrapper[5002]: E1203 17:52:15.842806 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 17:52:26 crc kubenswrapper[5002]: I1203 17:52:26.845797 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:52:27 crc kubenswrapper[5002]: I1203 17:52:27.705496 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"94e292ef11c6c843fa462b5f23f7bcb46c279bfc5926f3fae23ecbd8649fdcdd"} Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.223566 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-85ktn"] Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.229192 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-85ktn"] Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.343325 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-8f68f"] Dec 03 17:52:54 crc kubenswrapper[5002]: E1203 17:52:54.343958 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="registry-server" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.344062 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="registry-server" Dec 03 17:52:54 crc kubenswrapper[5002]: E1203 17:52:54.344202 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="extract-utilities" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.344279 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="extract-utilities" Dec 03 17:52:54 crc kubenswrapper[5002]: E1203 17:52:54.344364 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="extract-content" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.344434 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="extract-content" Dec 03 17:52:54 crc kubenswrapper[5002]: E1203 17:52:54.344515 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="registry-server" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.344595 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="registry-server" Dec 03 17:52:54 crc kubenswrapper[5002]: E1203 17:52:54.344679 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="extract-content" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.344766 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="extract-content" 
Dec 03 17:52:54 crc kubenswrapper[5002]: E1203 17:52:54.344865 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="extract-utilities" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.344956 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="extract-utilities" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.345204 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="72946c74-261c-4354-8c93-9d59e162b38b" containerName="registry-server" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.345315 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d862499d-4ace-41c6-8bee-6977d5b9cffc" containerName="registry-server" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.345996 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.348334 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.348601 5002 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-r9hql" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.348689 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.349015 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.354788 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-8f68f"] Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.513603 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf8kv\" (UniqueName: \"kubernetes.io/projected/232e2a1f-78fa-46a7-a1e8-efd499be5d89-kube-api-access-qf8kv\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.513655 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/232e2a1f-78fa-46a7-a1e8-efd499be5d89-crc-storage\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.513898 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/232e2a1f-78fa-46a7-a1e8-efd499be5d89-node-mnt\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.615031 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf8kv\" (UniqueName: \"kubernetes.io/projected/232e2a1f-78fa-46a7-a1e8-efd499be5d89-kube-api-access-qf8kv\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.615109 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" 
(UniqueName: \"kubernetes.io/configmap/232e2a1f-78fa-46a7-a1e8-efd499be5d89-crc-storage\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.615202 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/232e2a1f-78fa-46a7-a1e8-efd499be5d89-node-mnt\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.615648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/232e2a1f-78fa-46a7-a1e8-efd499be5d89-node-mnt\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.616583 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/232e2a1f-78fa-46a7-a1e8-efd499be5d89-crc-storage\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.633359 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf8kv\" (UniqueName: \"kubernetes.io/projected/232e2a1f-78fa-46a7-a1e8-efd499be5d89-kube-api-access-qf8kv\") pod \"crc-storage-crc-8f68f\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.668919 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:54 crc kubenswrapper[5002]: I1203 17:52:54.848259 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f99802d3-48de-4371-8d44-468b33627dfa" path="/var/lib/kubelet/pods/f99802d3-48de-4371-8d44-468b33627dfa/volumes" Dec 03 17:52:55 crc kubenswrapper[5002]: I1203 17:52:55.119393 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-8f68f"] Dec 03 17:52:55 crc kubenswrapper[5002]: I1203 17:52:55.925439 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8f68f" event={"ID":"232e2a1f-78fa-46a7-a1e8-efd499be5d89","Type":"ContainerStarted","Data":"e389fb6d345d20c7d84e3b2df1b4c57b04d483a48593175064a76d66aaa28abe"} Dec 03 17:52:55 crc kubenswrapper[5002]: I1203 17:52:55.925831 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8f68f" event={"ID":"232e2a1f-78fa-46a7-a1e8-efd499be5d89","Type":"ContainerStarted","Data":"0e0c0467959479ec090a19f85b06079f9ea8250c6397db468ccefcaaa23f2783"} Dec 03 17:52:55 crc kubenswrapper[5002]: I1203 17:52:55.941619 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-8f68f" podStartSLOduration=1.479774027 podStartE2EDuration="1.941597694s" podCreationTimestamp="2025-12-03 17:52:54 +0000 UTC" firstStartedPulling="2025-12-03 17:52:55.125363009 +0000 UTC m=+4898.539184897" lastFinishedPulling="2025-12-03 17:52:55.587186666 +0000 UTC m=+4899.001008564" observedRunningTime="2025-12-03 17:52:55.941265425 +0000 UTC m=+4899.355087313" watchObservedRunningTime="2025-12-03 17:52:55.941597694 +0000 UTC m=+4899.355419592" Dec 03 17:52:56 crc kubenswrapper[5002]: I1203 17:52:56.934328 5002 generic.go:334] "Generic (PLEG): container finished" podID="232e2a1f-78fa-46a7-a1e8-efd499be5d89" containerID="e389fb6d345d20c7d84e3b2df1b4c57b04d483a48593175064a76d66aaa28abe" exitCode=0 Dec 03 17:52:56 crc kubenswrapper[5002]: I1203 17:52:56.934379 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8f68f" event={"ID":"232e2a1f-78fa-46a7-a1e8-efd499be5d89","Type":"ContainerDied","Data":"e389fb6d345d20c7d84e3b2df1b4c57b04d483a48593175064a76d66aaa28abe"} Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.234577 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.366907 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/232e2a1f-78fa-46a7-a1e8-efd499be5d89-node-mnt\") pod \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.367103 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/232e2a1f-78fa-46a7-a1e8-efd499be5d89-crc-storage\") pod \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.367113 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/232e2a1f-78fa-46a7-a1e8-efd499be5d89-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "232e2a1f-78fa-46a7-a1e8-efd499be5d89" (UID: "232e2a1f-78fa-46a7-a1e8-efd499be5d89"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.367132 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qf8kv\" (UniqueName: \"kubernetes.io/projected/232e2a1f-78fa-46a7-a1e8-efd499be5d89-kube-api-access-qf8kv\") pod \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\" (UID: \"232e2a1f-78fa-46a7-a1e8-efd499be5d89\") " Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.392370 5002 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/232e2a1f-78fa-46a7-a1e8-efd499be5d89-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.398812 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/232e2a1f-78fa-46a7-a1e8-efd499be5d89-kube-api-access-qf8kv" (OuterVolumeSpecName: "kube-api-access-qf8kv") pod "232e2a1f-78fa-46a7-a1e8-efd499be5d89" (UID: "232e2a1f-78fa-46a7-a1e8-efd499be5d89"). InnerVolumeSpecName "kube-api-access-qf8kv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.403595 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232e2a1f-78fa-46a7-a1e8-efd499be5d89-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "232e2a1f-78fa-46a7-a1e8-efd499be5d89" (UID: "232e2a1f-78fa-46a7-a1e8-efd499be5d89"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.493855 5002 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/232e2a1f-78fa-46a7-a1e8-efd499be5d89-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.493888 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qf8kv\" (UniqueName: \"kubernetes.io/projected/232e2a1f-78fa-46a7-a1e8-efd499be5d89-kube-api-access-qf8kv\") on node \"crc\" DevicePath \"\"" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.949773 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8f68f" event={"ID":"232e2a1f-78fa-46a7-a1e8-efd499be5d89","Type":"ContainerDied","Data":"0e0c0467959479ec090a19f85b06079f9ea8250c6397db468ccefcaaa23f2783"} Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.949834 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-8f68f" Dec 03 17:52:58 crc kubenswrapper[5002]: I1203 17:52:58.949860 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e0c0467959479ec090a19f85b06079f9ea8250c6397db468ccefcaaa23f2783" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.239282 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-8f68f"] Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.247839 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-8f68f"] Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.379614 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-tptgp"] Dec 03 17:53:00 crc kubenswrapper[5002]: E1203 17:53:00.380136 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232e2a1f-78fa-46a7-a1e8-efd499be5d89" containerName="storage" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.380165 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="232e2a1f-78fa-46a7-a1e8-efd499be5d89" containerName="storage" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.380372 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="232e2a1f-78fa-46a7-a1e8-efd499be5d89" containerName="storage" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.381041 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.383095 5002 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-r9hql" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.383406 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.383425 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.388695 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tptgp"] Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.392826 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.517990 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/62e0a5b8-3725-4e54-b115-3090181692d2-node-mnt\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.518193 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s66q5\" (UniqueName: \"kubernetes.io/projected/62e0a5b8-3725-4e54-b115-3090181692d2-kube-api-access-s66q5\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.518232 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/62e0a5b8-3725-4e54-b115-3090181692d2-crc-storage\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc 
kubenswrapper[5002]: I1203 17:53:00.619824 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s66q5\" (UniqueName: \"kubernetes.io/projected/62e0a5b8-3725-4e54-b115-3090181692d2-kube-api-access-s66q5\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.619901 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/62e0a5b8-3725-4e54-b115-3090181692d2-crc-storage\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.619986 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/62e0a5b8-3725-4e54-b115-3090181692d2-node-mnt\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.620778 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/62e0a5b8-3725-4e54-b115-3090181692d2-node-mnt\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.621074 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/62e0a5b8-3725-4e54-b115-3090181692d2-crc-storage\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.640654 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s66q5\" (UniqueName: \"kubernetes.io/projected/62e0a5b8-3725-4e54-b115-3090181692d2-kube-api-access-s66q5\") pod \"crc-storage-crc-tptgp\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.696341 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:00 crc kubenswrapper[5002]: I1203 17:53:00.850160 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="232e2a1f-78fa-46a7-a1e8-efd499be5d89" path="/var/lib/kubelet/pods/232e2a1f-78fa-46a7-a1e8-efd499be5d89/volumes" Dec 03 17:53:01 crc kubenswrapper[5002]: I1203 17:53:01.149046 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tptgp"] Dec 03 17:53:01 crc kubenswrapper[5002]: I1203 17:53:01.974447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tptgp" event={"ID":"62e0a5b8-3725-4e54-b115-3090181692d2","Type":"ContainerStarted","Data":"dad69004e8d85da08b4adb1ab25427d22ded85cdf6679790ae2ef3a2a21fafa8"} Dec 03 17:53:02 crc kubenswrapper[5002]: I1203 17:53:02.986049 5002 generic.go:334] "Generic (PLEG): container finished" podID="62e0a5b8-3725-4e54-b115-3090181692d2" containerID="2d5997a38c2b1c7dea0518156d2667a650ed1bb51359aa748554f9efe6ca5b2a" exitCode=0 Dec 03 17:53:02 crc kubenswrapper[5002]: I1203 17:53:02.986162 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tptgp" event={"ID":"62e0a5b8-3725-4e54-b115-3090181692d2","Type":"ContainerDied","Data":"2d5997a38c2b1c7dea0518156d2667a650ed1bb51359aa748554f9efe6ca5b2a"} Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.218729 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.372530 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s66q5\" (UniqueName: \"kubernetes.io/projected/62e0a5b8-3725-4e54-b115-3090181692d2-kube-api-access-s66q5\") pod \"62e0a5b8-3725-4e54-b115-3090181692d2\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.372594 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/62e0a5b8-3725-4e54-b115-3090181692d2-node-mnt\") pod \"62e0a5b8-3725-4e54-b115-3090181692d2\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.372782 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/62e0a5b8-3725-4e54-b115-3090181692d2-crc-storage\") pod \"62e0a5b8-3725-4e54-b115-3090181692d2\" (UID: \"62e0a5b8-3725-4e54-b115-3090181692d2\") " Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.372844 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/62e0a5b8-3725-4e54-b115-3090181692d2-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "62e0a5b8-3725-4e54-b115-3090181692d2" (UID: "62e0a5b8-3725-4e54-b115-3090181692d2"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.373087 5002 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/62e0a5b8-3725-4e54-b115-3090181692d2-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.378079 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62e0a5b8-3725-4e54-b115-3090181692d2-kube-api-access-s66q5" (OuterVolumeSpecName: "kube-api-access-s66q5") pod "62e0a5b8-3725-4e54-b115-3090181692d2" (UID: "62e0a5b8-3725-4e54-b115-3090181692d2"). InnerVolumeSpecName "kube-api-access-s66q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.398709 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62e0a5b8-3725-4e54-b115-3090181692d2-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "62e0a5b8-3725-4e54-b115-3090181692d2" (UID: "62e0a5b8-3725-4e54-b115-3090181692d2"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.475032 5002 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/62e0a5b8-3725-4e54-b115-3090181692d2-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 03 17:53:04 crc kubenswrapper[5002]: I1203 17:53:04.475084 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s66q5\" (UniqueName: \"kubernetes.io/projected/62e0a5b8-3725-4e54-b115-3090181692d2-kube-api-access-s66q5\") on node \"crc\" DevicePath \"\"" Dec 03 17:53:05 crc kubenswrapper[5002]: I1203 17:53:05.006216 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tptgp" event={"ID":"62e0a5b8-3725-4e54-b115-3090181692d2","Type":"ContainerDied","Data":"dad69004e8d85da08b4adb1ab25427d22ded85cdf6679790ae2ef3a2a21fafa8"} Dec 03 17:53:05 crc kubenswrapper[5002]: I1203 17:53:05.006549 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dad69004e8d85da08b4adb1ab25427d22ded85cdf6679790ae2ef3a2a21fafa8" Dec 03 17:53:05 crc kubenswrapper[5002]: I1203 17:53:05.006288 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tptgp" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.027644 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5n4mq"] Dec 03 17:53:13 crc kubenswrapper[5002]: E1203 17:53:13.028611 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e0a5b8-3725-4e54-b115-3090181692d2" containerName="storage" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.028627 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e0a5b8-3725-4e54-b115-3090181692d2" containerName="storage" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.028819 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="62e0a5b8-3725-4e54-b115-3090181692d2" containerName="storage" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.029843 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.052582 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5n4mq"] Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.100117 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-utilities\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.100202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwc4m\" (UniqueName: \"kubernetes.io/projected/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-kube-api-access-dwc4m\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.100239 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-catalog-content\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.201372 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-utilities\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.201457 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwc4m\" (UniqueName: \"kubernetes.io/projected/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-kube-api-access-dwc4m\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.201488 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-catalog-content\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.202157 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-utilities\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.202175 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-catalog-content\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.222254 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dwc4m\" (UniqueName: \"kubernetes.io/projected/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-kube-api-access-dwc4m\") pod \"redhat-operators-5n4mq\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.360019 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:13 crc kubenswrapper[5002]: I1203 17:53:13.850084 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5n4mq"] Dec 03 17:53:14 crc kubenswrapper[5002]: I1203 17:53:14.080202 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerStarted","Data":"3445070b40560ac936359e81f05e65193cb9c1a459d181cdf2eb87e301db961e"} Dec 03 17:53:16 crc kubenswrapper[5002]: I1203 17:53:16.092938 5002 generic.go:334] "Generic (PLEG): container finished" podID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerID="572eb31c1495153c3ab571b1dac9b13428288146b5efe31669f72e18d5b62af1" exitCode=0 Dec 03 17:53:16 crc kubenswrapper[5002]: I1203 17:53:16.093041 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerDied","Data":"572eb31c1495153c3ab571b1dac9b13428288146b5efe31669f72e18d5b62af1"} Dec 03 17:53:17 crc kubenswrapper[5002]: I1203 17:53:17.102168 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerStarted","Data":"61ae4fcf06dfb03b5adea70a8c9eb57451b48449823d1c03a22e4fe8c5233eb2"} Dec 03 17:53:18 crc kubenswrapper[5002]: I1203 17:53:18.111113 5002 generic.go:334] "Generic (PLEG): container finished" podID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerID="61ae4fcf06dfb03b5adea70a8c9eb57451b48449823d1c03a22e4fe8c5233eb2" exitCode=0 Dec 03 17:53:18 crc kubenswrapper[5002]: I1203 17:53:18.111160 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerDied","Data":"61ae4fcf06dfb03b5adea70a8c9eb57451b48449823d1c03a22e4fe8c5233eb2"} Dec 03 17:53:20 crc kubenswrapper[5002]: I1203 17:53:20.135414 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerStarted","Data":"0dca195e6d697d02830c48ff47b16f77583284457f280469d9e62c68088c4e93"} Dec 03 17:53:20 crc kubenswrapper[5002]: I1203 17:53:20.157674 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5n4mq" podStartSLOduration=5.377994469 podStartE2EDuration="8.15765621s" podCreationTimestamp="2025-12-03 17:53:12 +0000 UTC" firstStartedPulling="2025-12-03 17:53:16.094872582 +0000 UTC m=+4919.508694470" lastFinishedPulling="2025-12-03 17:53:18.874534323 +0000 UTC m=+4922.288356211" observedRunningTime="2025-12-03 17:53:20.154248868 +0000 UTC m=+4923.568070766" watchObservedRunningTime="2025-12-03 17:53:20.15765621 +0000 UTC m=+4923.571478098" Dec 03 17:53:23 crc kubenswrapper[5002]: I1203 17:53:23.361030 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 
03 17:53:23 crc kubenswrapper[5002]: I1203 17:53:23.361425 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:24 crc kubenswrapper[5002]: I1203 17:53:24.406925 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5n4mq" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="registry-server" probeResult="failure" output=< Dec 03 17:53:24 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s Dec 03 17:53:24 crc kubenswrapper[5002]: > Dec 03 17:53:33 crc kubenswrapper[5002]: I1203 17:53:33.428603 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:33 crc kubenswrapper[5002]: I1203 17:53:33.572326 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:33 crc kubenswrapper[5002]: I1203 17:53:33.687000 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5n4mq"] Dec 03 17:53:35 crc kubenswrapper[5002]: I1203 17:53:35.248293 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5n4mq" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="registry-server" containerID="cri-o://0dca195e6d697d02830c48ff47b16f77583284457f280469d9e62c68088c4e93" gracePeriod=2 Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.254869 5002 generic.go:334] "Generic (PLEG): container finished" podID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerID="0dca195e6d697d02830c48ff47b16f77583284457f280469d9e62c68088c4e93" exitCode=0 Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.255004 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerDied","Data":"0dca195e6d697d02830c48ff47b16f77583284457f280469d9e62c68088c4e93"} Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.255519 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5n4mq" event={"ID":"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9","Type":"ContainerDied","Data":"3445070b40560ac936359e81f05e65193cb9c1a459d181cdf2eb87e301db961e"} Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.255538 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3445070b40560ac936359e81f05e65193cb9c1a459d181cdf2eb87e301db961e" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.297187 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.430570 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-utilities\") pod \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.430648 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-catalog-content\") pod \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.430676 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwc4m\" (UniqueName: \"kubernetes.io/projected/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-kube-api-access-dwc4m\") pod \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\" (UID: \"8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9\") " Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.431844 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-utilities" (OuterVolumeSpecName: "utilities") pod "8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" (UID: "8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.432212 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.435776 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-kube-api-access-dwc4m" (OuterVolumeSpecName: "kube-api-access-dwc4m") pod "8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" (UID: "8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9"). InnerVolumeSpecName "kube-api-access-dwc4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.533389 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwc4m\" (UniqueName: \"kubernetes.io/projected/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-kube-api-access-dwc4m\") on node \"crc\" DevicePath \"\"" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.571254 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" (UID: "8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:53:36 crc kubenswrapper[5002]: I1203 17:53:36.634663 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:53:37 crc kubenswrapper[5002]: I1203 17:53:37.264220 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5n4mq" Dec 03 17:53:37 crc kubenswrapper[5002]: I1203 17:53:37.291442 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5n4mq"] Dec 03 17:53:37 crc kubenswrapper[5002]: I1203 17:53:37.301199 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5n4mq"] Dec 03 17:53:38 crc kubenswrapper[5002]: I1203 17:53:38.849299 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" path="/var/lib/kubelet/pods/8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9/volumes" Dec 03 17:53:40 crc kubenswrapper[5002]: I1203 17:53:40.959882 5002 scope.go:117] "RemoveContainer" containerID="19e728c93e9ceb38a9c99bec1252830ce0792306044661663cda3346f5acc98f" Dec 03 17:54:50 crc kubenswrapper[5002]: I1203 17:54:50.916769 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:54:50 crc kubenswrapper[5002]: I1203 17:54:50.917272 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.140636 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f5d88f885-c8gzc"] Dec 03 17:55:06 crc kubenswrapper[5002]: E1203 17:55:06.141453 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="extract-utilities" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.141472 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="extract-utilities" Dec 03 17:55:06 crc kubenswrapper[5002]: E1203 17:55:06.141508 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="extract-content" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.141517 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="extract-content" Dec 03 17:55:06 crc kubenswrapper[5002]: E1203 17:55:06.141537 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="registry-server" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.141545 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="registry-server" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.141675 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b5af671-48f4-4ac3-b1c4-e53e6f9cabb9" containerName="registry-server" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.142449 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.156219 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-zmndj" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.156366 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.156769 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.156922 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.180739 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f5d88f885-c8gzc"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.222094 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-599f5d6f75-zp8z5"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.223298 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.225195 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.230198 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-599f5d6f75-zp8z5"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.259733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4cqk\" (UniqueName: \"kubernetes.io/projected/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-kube-api-access-h4cqk\") pod \"dnsmasq-dns-7f5d88f885-c8gzc\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.259948 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-config\") pod \"dnsmasq-dns-7f5d88f885-c8gzc\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.361139 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-dns-svc\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.361325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-config\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.361538 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4cqk\" (UniqueName: \"kubernetes.io/projected/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-kube-api-access-h4cqk\") pod \"dnsmasq-dns-7f5d88f885-c8gzc\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " 
pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.361601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-config\") pod \"dnsmasq-dns-7f5d88f885-c8gzc\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.361645 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4jwj\" (UniqueName: \"kubernetes.io/projected/9cfe4dbf-7df6-47dd-ac68-e68212c50055-kube-api-access-f4jwj\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.362732 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-config\") pod \"dnsmasq-dns-7f5d88f885-c8gzc\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.384605 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4cqk\" (UniqueName: \"kubernetes.io/projected/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-kube-api-access-h4cqk\") pod \"dnsmasq-dns-7f5d88f885-c8gzc\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.462478 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-config\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.462555 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4jwj\" (UniqueName: \"kubernetes.io/projected/9cfe4dbf-7df6-47dd-ac68-e68212c50055-kube-api-access-f4jwj\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.462595 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-dns-svc\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.463485 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-dns-svc\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.463570 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-config\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.482852 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4jwj\" (UniqueName: \"kubernetes.io/projected/9cfe4dbf-7df6-47dd-ac68-e68212c50055-kube-api-access-f4jwj\") pod \"dnsmasq-dns-599f5d6f75-zp8z5\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.485400 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-599f5d6f75-zp8z5"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.491101 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.497176 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.542207 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76d8c4d77f-t9hh2"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.543582 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.557021 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76d8c4d77f-t9hh2"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.667718 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-config\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.667805 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-dns-svc\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.667873 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmpkc\" (UniqueName: \"kubernetes.io/projected/783069a0-9c0a-4987-8ec2-7a770468c554-kube-api-access-mmpkc\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.771524 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-config\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.772000 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-dns-svc\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.772072 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmpkc\" (UniqueName: 
\"kubernetes.io/projected/783069a0-9c0a-4987-8ec2-7a770468c554-kube-api-access-mmpkc\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.772608 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-config\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.772806 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-dns-svc\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.814579 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmpkc\" (UniqueName: \"kubernetes.io/projected/783069a0-9c0a-4987-8ec2-7a770468c554-kube-api-access-mmpkc\") pod \"dnsmasq-dns-76d8c4d77f-t9hh2\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.909164 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.957367 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76d8c4d77f-t9hh2"] Dec 03 17:55:06 crc kubenswrapper[5002]: I1203 17:55:06.984397 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-599f5d6f75-zp8z5"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.024881 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cbb4f659c-wtqs6"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.026457 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: W1203 17:55:07.030819 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cfe4dbf_7df6_47dd_ac68_e68212c50055.slice/crio-bf69a62f159fadc2229b6aa6c0880cd55acab769e2776c86b3c73b21946acc9c WatchSource:0}: Error finding container bf69a62f159fadc2229b6aa6c0880cd55acab769e2776c86b3c73b21946acc9c: Status 404 returned error can't find the container with id bf69a62f159fadc2229b6aa6c0880cd55acab769e2776c86b3c73b21946acc9c Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.036070 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cbb4f659c-wtqs6"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.168128 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f5d88f885-c8gzc"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.202061 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-dns-svc\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.202457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfltf\" (UniqueName: \"kubernetes.io/projected/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-kube-api-access-gfltf\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.202492 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-config\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.303398 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-dns-svc\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.303455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfltf\" (UniqueName: \"kubernetes.io/projected/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-kube-api-access-gfltf\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.303497 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-config\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.304491 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-config\") pod 
\"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.305294 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-dns-svc\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.331036 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfltf\" (UniqueName: \"kubernetes.io/projected/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-kube-api-access-gfltf\") pod \"dnsmasq-dns-7cbb4f659c-wtqs6\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.367224 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.491374 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76d8c4d77f-t9hh2"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.623020 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cbb4f659c-wtqs6"] Dec 03 17:55:07 crc kubenswrapper[5002]: W1203 17:55:07.628268 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64f1ab3d_2baf_494e_9ba7_6ccbc39a29a2.slice/crio-b87ad890eea3a2592ee79ffcab64b1d9a7143b60b4397eb38090bb3194032d1d WatchSource:0}: Error finding container b87ad890eea3a2592ee79ffcab64b1d9a7143b60b4397eb38090bb3194032d1d: Status 404 returned error can't find the container with id b87ad890eea3a2592ee79ffcab64b1d9a7143b60b4397eb38090bb3194032d1d Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.698210 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.699390 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.701643 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.701896 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.702007 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jfhll" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.702180 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.702384 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.702500 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.703063 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.717831 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809652 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9jlw\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-kube-api-access-n9jlw\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809678 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809815 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809839 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809856 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-config-data\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.809894 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.810013 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.810045 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.810065 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.911726 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912024 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912124 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912253 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912341 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9jlw\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-kube-api-access-n9jlw\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912535 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912622 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-config-data\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912818 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.912959 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.913810 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.913876 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-config-data\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 
17:55:07.914577 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.914672 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.914950 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.916603 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.916868 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/af4a5dc127f6ade3b95cda6b38edc9b33be7dc25ffeedb37e4032a4859a29303/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.917375 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.918181 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.918666 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.920254 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.938639 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9jlw\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-kube-api-access-n9jlw\") pod \"rabbitmq-server-0\" (UID: 
\"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:07 crc kubenswrapper[5002]: I1203 17:55:07.956183 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " pod="openstack/rabbitmq-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.044247 5002 generic.go:334] "Generic (PLEG): container finished" podID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerID="2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311" exitCode=0 Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.044346 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" event={"ID":"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2","Type":"ContainerDied","Data":"2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.044986 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" event={"ID":"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2","Type":"ContainerStarted","Data":"b87ad890eea3a2592ee79ffcab64b1d9a7143b60b4397eb38090bb3194032d1d"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.046518 5002 generic.go:334] "Generic (PLEG): container finished" podID="9cfe4dbf-7df6-47dd-ac68-e68212c50055" containerID="dce272941b6a47de2770ec2956bbddb69325bffa0161e63122103cb6c35a441f" exitCode=0 Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.046570 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" event={"ID":"9cfe4dbf-7df6-47dd-ac68-e68212c50055","Type":"ContainerDied","Data":"dce272941b6a47de2770ec2956bbddb69325bffa0161e63122103cb6c35a441f"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.046593 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" event={"ID":"9cfe4dbf-7df6-47dd-ac68-e68212c50055","Type":"ContainerStarted","Data":"bf69a62f159fadc2229b6aa6c0880cd55acab769e2776c86b3c73b21946acc9c"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.049281 5002 generic.go:334] "Generic (PLEG): container finished" podID="783069a0-9c0a-4987-8ec2-7a770468c554" containerID="b8c10c90de2d1f385da0e9edc28cd69adb4d4bb94aa6a606bb822624a933097a" exitCode=0 Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.049379 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" event={"ID":"783069a0-9c0a-4987-8ec2-7a770468c554","Type":"ContainerDied","Data":"b8c10c90de2d1f385da0e9edc28cd69adb4d4bb94aa6a606bb822624a933097a"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.049415 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" event={"ID":"783069a0-9c0a-4987-8ec2-7a770468c554","Type":"ContainerStarted","Data":"4ce0c41f662bf920beeaf94027dc40667353a7943b1301167ebf818af448287c"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.056350 5002 generic.go:334] "Generic (PLEG): container finished" podID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerID="93fe6c9b0f6d760ef72a30a118b9c5962c016bff5a5408f456f642c18e09454e" exitCode=0 Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.056640 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" 
event={"ID":"a5a2466a-492f-442c-ba7c-a6ab13bcd67e","Type":"ContainerDied","Data":"93fe6c9b0f6d760ef72a30a118b9c5962c016bff5a5408f456f642c18e09454e"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.056740 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" event={"ID":"a5a2466a-492f-442c-ba7c-a6ab13bcd67e","Type":"ContainerStarted","Data":"f03f4cc9fa70770a3987d278c0fe21800bb4f34ac2c413266fed5e6d2bd7ff3c"} Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.091842 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.142531 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.143665 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.146248 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.146350 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.146412 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.146453 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.146556 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.146683 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-64z69" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.150349 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.156553 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323164 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/44133afa-0a34-417e-b163-44535b0e3b49-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323208 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323242 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc 
kubenswrapper[5002]: I1203 17:55:08.323260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323281 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323310 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323337 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q4gt\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-kube-api-access-5q4gt\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323396 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323412 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/44133afa-0a34-417e-b163-44535b0e3b49-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.323451 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.401458 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424322 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424361 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424382 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424401 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424424 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q4gt\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-kube-api-access-5q4gt\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424443 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424477 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424492 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/44133afa-0a34-417e-b163-44535b0e3b49-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424530 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 
03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424560 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/44133afa-0a34-417e-b163-44535b0e3b49-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424578 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.424964 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.425563 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.426224 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.427597 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.427728 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.429686 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.429738 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.429785 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f9842c43a86c944e2b3df50386d4b6ebe4493964e33fb144fba5d58ddbd0a6d7/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.429887 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/44133afa-0a34-417e-b163-44535b0e3b49-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.430034 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.431507 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.431536 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/44133afa-0a34-417e-b163-44535b0e3b49-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.454366 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.456995 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q4gt\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-kube-api-access-5q4gt\") pod \"rabbitmq-cell1-server-0\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.497932 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.525391 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-config\") pod \"783069a0-9c0a-4987-8ec2-7a770468c554\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.525463 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmpkc\" (UniqueName: \"kubernetes.io/projected/783069a0-9c0a-4987-8ec2-7a770468c554-kube-api-access-mmpkc\") pod \"783069a0-9c0a-4987-8ec2-7a770468c554\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.525496 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-dns-svc\") pod \"783069a0-9c0a-4987-8ec2-7a770468c554\" (UID: \"783069a0-9c0a-4987-8ec2-7a770468c554\") " Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.525554 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4jwj\" (UniqueName: \"kubernetes.io/projected/9cfe4dbf-7df6-47dd-ac68-e68212c50055-kube-api-access-f4jwj\") pod \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.525583 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-dns-svc\") pod \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.525630 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-config\") pod \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\" (UID: \"9cfe4dbf-7df6-47dd-ac68-e68212c50055\") " Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.532447 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/783069a0-9c0a-4987-8ec2-7a770468c554-kube-api-access-mmpkc" (OuterVolumeSpecName: "kube-api-access-mmpkc") pod "783069a0-9c0a-4987-8ec2-7a770468c554" (UID: "783069a0-9c0a-4987-8ec2-7a770468c554"). InnerVolumeSpecName "kube-api-access-mmpkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.546729 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-config" (OuterVolumeSpecName: "config") pod "9cfe4dbf-7df6-47dd-ac68-e68212c50055" (UID: "9cfe4dbf-7df6-47dd-ac68-e68212c50055"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.549374 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9cfe4dbf-7df6-47dd-ac68-e68212c50055" (UID: "9cfe4dbf-7df6-47dd-ac68-e68212c50055"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.551792 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cfe4dbf-7df6-47dd-ac68-e68212c50055-kube-api-access-f4jwj" (OuterVolumeSpecName: "kube-api-access-f4jwj") pod "9cfe4dbf-7df6-47dd-ac68-e68212c50055" (UID: "9cfe4dbf-7df6-47dd-ac68-e68212c50055"). InnerVolumeSpecName "kube-api-access-f4jwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.552303 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "783069a0-9c0a-4987-8ec2-7a770468c554" (UID: "783069a0-9c0a-4987-8ec2-7a770468c554"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.558921 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-config" (OuterVolumeSpecName: "config") pod "783069a0-9c0a-4987-8ec2-7a770468c554" (UID: "783069a0-9c0a-4987-8ec2-7a770468c554"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.627506 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-config\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.627785 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-config\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.627795 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmpkc\" (UniqueName: \"kubernetes.io/projected/783069a0-9c0a-4987-8ec2-7a770468c554-kube-api-access-mmpkc\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.627806 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783069a0-9c0a-4987-8ec2-7a770468c554-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.627815 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4jwj\" (UniqueName: \"kubernetes.io/projected/9cfe4dbf-7df6-47dd-ac68-e68212c50055-kube-api-access-f4jwj\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.627824 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cfe4dbf-7df6-47dd-ac68-e68212c50055-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.735499 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.920333 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:55:08 crc kubenswrapper[5002]: W1203 17:55:08.921862 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44133afa_0a34_417e_b163_44535b0e3b49.slice/crio-57f6c9f27f919ef708200452d52b3e81f7647eb84656d7dfdb5d326f0f836b8b 
WatchSource:0}: Error finding container 57f6c9f27f919ef708200452d52b3e81f7647eb84656d7dfdb5d326f0f836b8b: Status 404 returned error can't find the container with id 57f6c9f27f919ef708200452d52b3e81f7647eb84656d7dfdb5d326f0f836b8b Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.973046 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 17:55:08 crc kubenswrapper[5002]: E1203 17:55:08.973387 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="783069a0-9c0a-4987-8ec2-7a770468c554" containerName="init" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.973405 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="783069a0-9c0a-4987-8ec2-7a770468c554" containerName="init" Dec 03 17:55:08 crc kubenswrapper[5002]: E1203 17:55:08.973420 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfe4dbf-7df6-47dd-ac68-e68212c50055" containerName="init" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.973427 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfe4dbf-7df6-47dd-ac68-e68212c50055" containerName="init" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.973601 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="783069a0-9c0a-4987-8ec2-7a770468c554" containerName="init" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.973618 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cfe4dbf-7df6-47dd-ac68-e68212c50055" containerName="init" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.974768 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.977826 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.978195 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-jx2ch" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.978450 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.978477 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.983056 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 17:55:08 crc kubenswrapper[5002]: I1203 17:55:08.986165 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.063236 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"44133afa-0a34-417e-b163-44535b0e3b49","Type":"ContainerStarted","Data":"57f6c9f27f919ef708200452d52b3e81f7647eb84656d7dfdb5d326f0f836b8b"} Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.065389 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" event={"ID":"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2","Type":"ContainerStarted","Data":"b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4"} Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.065480 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.068030 
5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.068850 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599f5d6f75-zp8z5" event={"ID":"9cfe4dbf-7df6-47dd-ac68-e68212c50055","Type":"ContainerDied","Data":"bf69a62f159fadc2229b6aa6c0880cd55acab769e2776c86b3c73b21946acc9c"} Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.068958 5002 scope.go:117] "RemoveContainer" containerID="dce272941b6a47de2770ec2956bbddb69325bffa0161e63122103cb6c35a441f" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.069836 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cea0e0f-b554-4cf2-a4d9-16d5487260a2","Type":"ContainerStarted","Data":"4ba521b9bcde4c3e73ae48c279d4a624ed4f75b1cba92c08a8e8ca5332b46518"} Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.083594 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" event={"ID":"783069a0-9c0a-4987-8ec2-7a770468c554","Type":"ContainerDied","Data":"4ce0c41f662bf920beeaf94027dc40667353a7943b1301167ebf818af448287c"} Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.083733 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76d8c4d77f-t9hh2" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.110962 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" event={"ID":"a5a2466a-492f-442c-ba7c-a6ab13bcd67e","Type":"ContainerStarted","Data":"0fcac89abc20ce648bb7642f3b79a32aeba90a6726122f477970ed31527e8e7f"} Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.111659 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.113245 5002 scope.go:117] "RemoveContainer" containerID="b8c10c90de2d1f385da0e9edc28cd69adb4d4bb94aa6a606bb822624a933097a" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.126848 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" podStartSLOduration=3.126824124 podStartE2EDuration="3.126824124s" podCreationTimestamp="2025-12-03 17:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:09.094109854 +0000 UTC m=+5032.507931752" watchObservedRunningTime="2025-12-03 17:55:09.126824124 +0000 UTC m=+5032.540646012" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139436 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139509 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69lzn\" (UniqueName: \"kubernetes.io/projected/963808a8-caf6-4d66-a86b-be61b550a6a0-kube-api-access-69lzn\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 
17:55:09.139588 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-kolla-config\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139653 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/963808a8-caf6-4d66-a86b-be61b550a6a0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139677 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139732 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/963808a8-caf6-4d66-a86b-be61b550a6a0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139781 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/963808a8-caf6-4d66-a86b-be61b550a6a0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.139822 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-config-data-default\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.153775 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-599f5d6f75-zp8z5"] Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.159500 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-599f5d6f75-zp8z5"] Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.193403 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76d8c4d77f-t9hh2"] Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.204288 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76d8c4d77f-t9hh2"] Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.208496 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" podStartSLOduration=3.208469249 podStartE2EDuration="3.208469249s" podCreationTimestamp="2025-12-03 17:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:09.202452928 +0000 UTC m=+5032.616274816" watchObservedRunningTime="2025-12-03 17:55:09.208469249 +0000 UTC m=+5032.622291137" Dec 03 17:55:09 
crc kubenswrapper[5002]: I1203 17:55:09.241452 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241507 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69lzn\" (UniqueName: \"kubernetes.io/projected/963808a8-caf6-4d66-a86b-be61b550a6a0-kube-api-access-69lzn\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241575 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-kolla-config\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241611 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/963808a8-caf6-4d66-a86b-be61b550a6a0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241632 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241666 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/963808a8-caf6-4d66-a86b-be61b550a6a0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241692 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/963808a8-caf6-4d66-a86b-be61b550a6a0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.241711 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-config-data-default\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.242393 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/963808a8-caf6-4d66-a86b-be61b550a6a0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.243179 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-config-data-default\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.243551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-kolla-config\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.244783 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/963808a8-caf6-4d66-a86b-be61b550a6a0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.244974 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.245004 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/80158a8202aad65eb72aa47d7f611b3fd4e79f1675791c6c5da9fc7e040b89fb/globalmount\"" pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.339931 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/963808a8-caf6-4d66-a86b-be61b550a6a0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.340331 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/963808a8-caf6-4d66-a86b-be61b550a6a0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.343227 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69lzn\" (UniqueName: \"kubernetes.io/projected/963808a8-caf6-4d66-a86b-be61b550a6a0-kube-api-access-69lzn\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.361675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f9fe730-961c-4a2d-9d38-7c2bc827864b\") pod \"openstack-galera-0\" (UID: \"963808a8-caf6-4d66-a86b-be61b550a6a0\") " pod="openstack/openstack-galera-0" Dec 03 17:55:09 crc kubenswrapper[5002]: I1203 17:55:09.600428 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.053226 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 17:55:10 crc kubenswrapper[5002]: W1203 17:55:10.056884 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod963808a8_caf6_4d66_a86b_be61b550a6a0.slice/crio-130f6a203ec0e34fa4d92d12216f4d848f1da8402dd390923b1c2da559502b44 WatchSource:0}: Error finding container 130f6a203ec0e34fa4d92d12216f4d848f1da8402dd390923b1c2da559502b44: Status 404 returned error can't find the container with id 130f6a203ec0e34fa4d92d12216f4d848f1da8402dd390923b1c2da559502b44 Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.121107 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"963808a8-caf6-4d66-a86b-be61b550a6a0","Type":"ContainerStarted","Data":"130f6a203ec0e34fa4d92d12216f4d848f1da8402dd390923b1c2da559502b44"} Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.613303 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.618302 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.621349 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.621668 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.626035 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.629274 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-xqf8d" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.629463 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.767837 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.767888 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5sbj\" (UniqueName: \"kubernetes.io/projected/05e199a0-a421-4d94-8454-a02e66aca0c9-kube-api-access-w5sbj\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.767933 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.768072 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05e199a0-a421-4d94-8454-a02e66aca0c9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.768128 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e199a0-a421-4d94-8454-a02e66aca0c9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.768180 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.768272 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.768321 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e199a0-a421-4d94-8454-a02e66aca0c9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.850928 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="783069a0-9c0a-4987-8ec2-7a770468c554" path="/var/lib/kubelet/pods/783069a0-9c0a-4987-8ec2-7a770468c554/volumes" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.851428 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cfe4dbf-7df6-47dd-ac68-e68212c50055" path="/var/lib/kubelet/pods/9cfe4dbf-7df6-47dd-ac68-e68212c50055/volumes" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869284 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5sbj\" (UniqueName: \"kubernetes.io/projected/05e199a0-a421-4d94-8454-a02e66aca0c9-kube-api-access-w5sbj\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869336 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869407 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05e199a0-a421-4d94-8454-a02e66aca0c9-config-data-generated\") pod 
\"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869431 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e199a0-a421-4d94-8454-a02e66aca0c9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869464 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869494 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869527 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e199a0-a421-4d94-8454-a02e66aca0c9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869583 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.869985 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05e199a0-a421-4d94-8454-a02e66aca0c9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.870531 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.870829 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.871164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05e199a0-a421-4d94-8454-a02e66aca0c9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: 
\"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.874421 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.874462 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/77065f24a52ec3aee431eaa5363c05e4bccac064e9bb92d746367b5d4e6db891/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.877716 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e199a0-a421-4d94-8454-a02e66aca0c9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.884956 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e199a0-a421-4d94-8454-a02e66aca0c9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.892269 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5sbj\" (UniqueName: \"kubernetes.io/projected/05e199a0-a421-4d94-8454-a02e66aca0c9-kube-api-access-w5sbj\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.919471 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4184a185-7d1d-409d-ac51-5ce9b8bda7c6\") pod \"openstack-cell1-galera-0\" (UID: \"05e199a0-a421-4d94-8454-a02e66aca0c9\") " pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.961180 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.962176 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.967315 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.968771 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.970151 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-62whf" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.970388 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 17:55:10 crc kubenswrapper[5002]: I1203 17:55:10.971627 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.075527 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-kolla-config\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.075590 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp46v\" (UniqueName: \"kubernetes.io/projected/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-kube-api-access-jp46v\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.075611 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-config-data\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.075671 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.075756 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.143718 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"44133afa-0a34-417e-b163-44535b0e3b49","Type":"ContainerStarted","Data":"eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48"} Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.146924 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cea0e0f-b554-4cf2-a4d9-16d5487260a2","Type":"ContainerStarted","Data":"e25d45c21395ae142b3d335bdd21be02cc18a464dbc055cc873496ca7d90c52c"} Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.149895 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"963808a8-caf6-4d66-a86b-be61b550a6a0","Type":"ContainerStarted","Data":"7d028847abb7c8b5f0a8134d963d457c76444ed3120cf0ce69448b08d00c852e"} Dec 03 17:55:11 crc 
kubenswrapper[5002]: I1203 17:55:11.176908 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.176957 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-kolla-config\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.176985 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp46v\" (UniqueName: \"kubernetes.io/projected/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-kube-api-access-jp46v\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.177006 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-config-data\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.177046 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.178004 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-kolla-config\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.178316 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-config-data\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.185271 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.185770 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.199093 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp46v\" (UniqueName: \"kubernetes.io/projected/d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7-kube-api-access-jp46v\") pod \"memcached-0\" (UID: \"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7\") " pod="openstack/memcached-0" Dec 03 17:55:11 crc 
kubenswrapper[5002]: I1203 17:55:11.339998 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.409039 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 17:55:11 crc kubenswrapper[5002]: W1203 17:55:11.412828 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05e199a0_a421_4d94_8454_a02e66aca0c9.slice/crio-810187bdd13587def74abc671614694d04f43b18556aaa636b83a6c4e885847f WatchSource:0}: Error finding container 810187bdd13587def74abc671614694d04f43b18556aaa636b83a6c4e885847f: Status 404 returned error can't find the container with id 810187bdd13587def74abc671614694d04f43b18556aaa636b83a6c4e885847f Dec 03 17:55:11 crc kubenswrapper[5002]: I1203 17:55:11.756952 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 17:55:11 crc kubenswrapper[5002]: W1203 17:55:11.760965 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5f3a5b4_4fa3_478d_afac_03ed26d8b3d7.slice/crio-c80879dc761a42b38ebdacd162ac7dda2e11e260575141a41308689048de25b6 WatchSource:0}: Error finding container c80879dc761a42b38ebdacd162ac7dda2e11e260575141a41308689048de25b6: Status 404 returned error can't find the container with id c80879dc761a42b38ebdacd162ac7dda2e11e260575141a41308689048de25b6 Dec 03 17:55:12 crc kubenswrapper[5002]: I1203 17:55:12.158788 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7","Type":"ContainerStarted","Data":"e57648efca82eb6d6b8ab30f98a73b2bcc2e980d3d5226d292dce4efc84cdd35"} Dec 03 17:55:12 crc kubenswrapper[5002]: I1203 17:55:12.158838 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7","Type":"ContainerStarted","Data":"c80879dc761a42b38ebdacd162ac7dda2e11e260575141a41308689048de25b6"} Dec 03 17:55:12 crc kubenswrapper[5002]: I1203 17:55:12.158891 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 17:55:12 crc kubenswrapper[5002]: I1203 17:55:12.160195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05e199a0-a421-4d94-8454-a02e66aca0c9","Type":"ContainerStarted","Data":"0675bd9bb768dd0f99374545638c30e4ef6b8f32c3249acd02f3a2786706ff8c"} Dec 03 17:55:12 crc kubenswrapper[5002]: I1203 17:55:12.160241 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05e199a0-a421-4d94-8454-a02e66aca0c9","Type":"ContainerStarted","Data":"810187bdd13587def74abc671614694d04f43b18556aaa636b83a6c4e885847f"} Dec 03 17:55:12 crc kubenswrapper[5002]: I1203 17:55:12.182244 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.18222286 podStartE2EDuration="2.18222286s" podCreationTimestamp="2025-12-03 17:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:12.180894365 +0000 UTC m=+5035.594716253" watchObservedRunningTime="2025-12-03 17:55:12.18222286 +0000 UTC m=+5035.596044748" Dec 03 17:55:14 crc kubenswrapper[5002]: I1203 17:55:14.175771 5002 generic.go:334] 
"Generic (PLEG): container finished" podID="963808a8-caf6-4d66-a86b-be61b550a6a0" containerID="7d028847abb7c8b5f0a8134d963d457c76444ed3120cf0ce69448b08d00c852e" exitCode=0 Dec 03 17:55:14 crc kubenswrapper[5002]: I1203 17:55:14.175855 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"963808a8-caf6-4d66-a86b-be61b550a6a0","Type":"ContainerDied","Data":"7d028847abb7c8b5f0a8134d963d457c76444ed3120cf0ce69448b08d00c852e"} Dec 03 17:55:15 crc kubenswrapper[5002]: I1203 17:55:15.190395 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"963808a8-caf6-4d66-a86b-be61b550a6a0","Type":"ContainerStarted","Data":"dfc5c2c3337e19101f947009dc28da10205a51b4f19016ea79220d5ec2e5ff7c"} Dec 03 17:55:15 crc kubenswrapper[5002]: I1203 17:55:15.191814 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05e199a0-a421-4d94-8454-a02e66aca0c9","Type":"ContainerDied","Data":"0675bd9bb768dd0f99374545638c30e4ef6b8f32c3249acd02f3a2786706ff8c"} Dec 03 17:55:15 crc kubenswrapper[5002]: I1203 17:55:15.191509 5002 generic.go:334] "Generic (PLEG): container finished" podID="05e199a0-a421-4d94-8454-a02e66aca0c9" containerID="0675bd9bb768dd0f99374545638c30e4ef6b8f32c3249acd02f3a2786706ff8c" exitCode=0 Dec 03 17:55:15 crc kubenswrapper[5002]: I1203 17:55:15.235677 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.235656783 podStartE2EDuration="8.235656783s" podCreationTimestamp="2025-12-03 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:15.230689619 +0000 UTC m=+5038.644511537" watchObservedRunningTime="2025-12-03 17:55:15.235656783 +0000 UTC m=+5038.649478671" Dec 03 17:55:16 crc kubenswrapper[5002]: I1203 17:55:16.203193 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"05e199a0-a421-4d94-8454-a02e66aca0c9","Type":"ContainerStarted","Data":"14ac73ca293ecde804809b7f37e1af13e79671aaa97a14a6228800e974e94c98"} Dec 03 17:55:16 crc kubenswrapper[5002]: I1203 17:55:16.236874 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.23685624 podStartE2EDuration="7.23685624s" podCreationTimestamp="2025-12-03 17:55:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:16.231363832 +0000 UTC m=+5039.645185760" watchObservedRunningTime="2025-12-03 17:55:16.23685624 +0000 UTC m=+5039.650678128" Dec 03 17:55:16 crc kubenswrapper[5002]: I1203 17:55:16.341974 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 17:55:16 crc kubenswrapper[5002]: I1203 17:55:16.498912 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:17 crc kubenswrapper[5002]: I1203 17:55:17.368953 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:55:17 crc kubenswrapper[5002]: I1203 17:55:17.429201 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f5d88f885-c8gzc"] Dec 03 17:55:17 crc kubenswrapper[5002]: I1203 17:55:17.429453 5002 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerName="dnsmasq-dns" containerID="cri-o://0fcac89abc20ce648bb7642f3b79a32aeba90a6726122f477970ed31527e8e7f" gracePeriod=10 Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.224025 5002 generic.go:334] "Generic (PLEG): container finished" podID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerID="0fcac89abc20ce648bb7642f3b79a32aeba90a6726122f477970ed31527e8e7f" exitCode=0 Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.224139 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" event={"ID":"a5a2466a-492f-442c-ba7c-a6ab13bcd67e","Type":"ContainerDied","Data":"0fcac89abc20ce648bb7642f3b79a32aeba90a6726122f477970ed31527e8e7f"} Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.423931 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.497860 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4cqk\" (UniqueName: \"kubernetes.io/projected/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-kube-api-access-h4cqk\") pod \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.497906 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-config\") pod \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\" (UID: \"a5a2466a-492f-442c-ba7c-a6ab13bcd67e\") " Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.509019 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-kube-api-access-h4cqk" (OuterVolumeSpecName: "kube-api-access-h4cqk") pod "a5a2466a-492f-442c-ba7c-a6ab13bcd67e" (UID: "a5a2466a-492f-442c-ba7c-a6ab13bcd67e"). InnerVolumeSpecName "kube-api-access-h4cqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.545831 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-config" (OuterVolumeSpecName: "config") pod "a5a2466a-492f-442c-ba7c-a6ab13bcd67e" (UID: "a5a2466a-492f-442c-ba7c-a6ab13bcd67e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.600118 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4cqk\" (UniqueName: \"kubernetes.io/projected/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-kube-api-access-h4cqk\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:18 crc kubenswrapper[5002]: I1203 17:55:18.600168 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a2466a-492f-442c-ba7c-a6ab13bcd67e-config\") on node \"crc\" DevicePath \"\"" Dec 03 17:55:18 crc kubenswrapper[5002]: E1203 17:55:18.718295 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.155:48162->38.102.83.155:42635: write tcp 38.102.83.155:48162->38.102.83.155:42635: write: broken pipe Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.238690 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" event={"ID":"a5a2466a-492f-442c-ba7c-a6ab13bcd67e","Type":"ContainerDied","Data":"f03f4cc9fa70770a3987d278c0fe21800bb4f34ac2c413266fed5e6d2bd7ff3c"} Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.238811 5002 scope.go:117] "RemoveContainer" containerID="0fcac89abc20ce648bb7642f3b79a32aeba90a6726122f477970ed31527e8e7f" Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.239006 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f5d88f885-c8gzc" Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.264260 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f5d88f885-c8gzc"] Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.270805 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f5d88f885-c8gzc"] Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.281759 5002 scope.go:117] "RemoveContainer" containerID="93fe6c9b0f6d760ef72a30a118b9c5962c016bff5a5408f456f642c18e09454e" Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.600805 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 03 17:55:19 crc kubenswrapper[5002]: I1203 17:55:19.601205 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 03 17:55:20 crc kubenswrapper[5002]: E1203 17:55:20.168992 5002 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.155:48186->38.102.83.155:42635: read tcp 38.102.83.155:48186->38.102.83.155:42635: read: connection reset by peer Dec 03 17:55:20 crc kubenswrapper[5002]: I1203 17:55:20.849641 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" path="/var/lib/kubelet/pods/a5a2466a-492f-442c-ba7c-a6ab13bcd67e/volumes" Dec 03 17:55:20 crc kubenswrapper[5002]: I1203 17:55:20.916370 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:55:20 crc kubenswrapper[5002]: I1203 17:55:20.916439 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:55:20 crc kubenswrapper[5002]: I1203 17:55:20.963478 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:20 crc kubenswrapper[5002]: I1203 17:55:20.963538 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:21 crc kubenswrapper[5002]: I1203 17:55:21.806230 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 03 17:55:21 crc kubenswrapper[5002]: I1203 17:55:21.891399 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 03 17:55:23 crc kubenswrapper[5002]: I1203 17:55:23.131579 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:23 crc kubenswrapper[5002]: I1203 17:55:23.205843 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 17:55:42 crc kubenswrapper[5002]: I1203 17:55:42.444543 5002 generic.go:334] "Generic (PLEG): container finished" podID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerID="e25d45c21395ae142b3d335bdd21be02cc18a464dbc055cc873496ca7d90c52c" exitCode=0 Dec 03 17:55:42 crc kubenswrapper[5002]: I1203 17:55:42.444668 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cea0e0f-b554-4cf2-a4d9-16d5487260a2","Type":"ContainerDied","Data":"e25d45c21395ae142b3d335bdd21be02cc18a464dbc055cc873496ca7d90c52c"} Dec 03 17:55:42 crc kubenswrapper[5002]: I1203 17:55:42.447391 5002 generic.go:334] "Generic (PLEG): container finished" podID="44133afa-0a34-417e-b163-44535b0e3b49" containerID="eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48" exitCode=0 Dec 03 17:55:42 crc kubenswrapper[5002]: I1203 17:55:42.447476 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"44133afa-0a34-417e-b163-44535b0e3b49","Type":"ContainerDied","Data":"eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48"} Dec 03 17:55:43 crc kubenswrapper[5002]: I1203 17:55:43.458516 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"44133afa-0a34-417e-b163-44535b0e3b49","Type":"ContainerStarted","Data":"09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4"} Dec 03 17:55:43 crc kubenswrapper[5002]: I1203 17:55:43.459864 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:55:43 crc kubenswrapper[5002]: I1203 17:55:43.461676 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cea0e0f-b554-4cf2-a4d9-16d5487260a2","Type":"ContainerStarted","Data":"50dd6e5b6c8791eb9952b8ee23bc43523d2c1200ab318d4a4201384f751d15e3"} Dec 03 17:55:43 crc kubenswrapper[5002]: I1203 17:55:43.461951 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 17:55:43 crc kubenswrapper[5002]: I1203 17:55:43.495120 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.495096545 podStartE2EDuration="36.495096545s" podCreationTimestamp="2025-12-03 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:43.484627012 +0000 UTC m=+5066.898448920" watchObservedRunningTime="2025-12-03 17:55:43.495096545 +0000 UTC m=+5066.908918433" Dec 03 17:55:43 crc kubenswrapper[5002]: I1203 17:55:43.512434 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.51241934 podStartE2EDuration="37.51241934s" podCreationTimestamp="2025-12-03 17:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:55:43.508192367 +0000 UTC m=+5066.922014255" watchObservedRunningTime="2025-12-03 17:55:43.51241934 +0000 UTC m=+5066.926241228" Dec 03 17:55:50 crc kubenswrapper[5002]: I1203 17:55:50.916594 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:55:50 crc kubenswrapper[5002]: I1203 17:55:50.917276 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:55:50 crc kubenswrapper[5002]: I1203 17:55:50.917332 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 17:55:50 crc kubenswrapper[5002]: I1203 17:55:50.918084 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"94e292ef11c6c843fa462b5f23f7bcb46c279bfc5926f3fae23ecbd8649fdcdd"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:55:50 crc kubenswrapper[5002]: I1203 17:55:50.918153 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://94e292ef11c6c843fa462b5f23f7bcb46c279bfc5926f3fae23ecbd8649fdcdd" gracePeriod=600 Dec 03 17:55:51 crc kubenswrapper[5002]: I1203 17:55:51.526325 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="94e292ef11c6c843fa462b5f23f7bcb46c279bfc5926f3fae23ecbd8649fdcdd" exitCode=0 Dec 03 17:55:51 crc kubenswrapper[5002]: I1203 17:55:51.526902 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"94e292ef11c6c843fa462b5f23f7bcb46c279bfc5926f3fae23ecbd8649fdcdd"} Dec 03 17:55:51 crc kubenswrapper[5002]: I1203 17:55:51.526927 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"} Dec 03 17:55:51 crc kubenswrapper[5002]: I1203 
17:55:51.526943 5002 scope.go:117] "RemoveContainer" containerID="c13d1a741009c76cb8e073861cacf59b5876bc4cbb276cfda3005f25637a35cc" Dec 03 17:55:58 crc kubenswrapper[5002]: I1203 17:55:58.096037 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 17:55:58 crc kubenswrapper[5002]: I1203 17:55:58.500942 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.704006 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f79bf7859-nxvrw"] Dec 03 17:56:02 crc kubenswrapper[5002]: E1203 17:56:02.704931 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerName="init" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.704948 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerName="init" Dec 03 17:56:02 crc kubenswrapper[5002]: E1203 17:56:02.704978 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerName="dnsmasq-dns" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.704987 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerName="dnsmasq-dns" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.705189 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a2466a-492f-442c-ba7c-a6ab13bcd67e" containerName="dnsmasq-dns" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.706195 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.716213 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f79bf7859-nxvrw"] Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.773047 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-dns-svc\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.773126 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-config\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.773192 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mrtz\" (UniqueName: \"kubernetes.io/projected/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-kube-api-access-2mrtz\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.875582 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mrtz\" (UniqueName: \"kubernetes.io/projected/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-kube-api-access-2mrtz\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" 
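[Editor's note] The machine-config-daemon restart recorded above is the standard HTTP liveness-probe path: kubelet GETs http://127.0.0.1:8798/health, the dial is refused, the container is marked unhealthy and killed with a grace period, and the previous instance's container ID is removed once the replacement starts. A minimal sketch of a health endpoint compatible with such a probe follows; the port and path are taken from the log lines above, and the handler body is purely illustrative, not the machine-config-daemon's actual code.

package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// Kubelet's HTTP prober treats any status in [200, 400) as success;
	// anything else, or a refused connection, is a probe failure.
	mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte("ok"))
	})
	// 127.0.0.1:8798 matches the probe target in the log; until this
	// listener is up, probes fail with "connect: connection refused"
	// exactly as recorded above.
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", mux))
}
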
Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.875676 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-dns-svc\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.876282 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-config\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.877139 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-dns-svc\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.878506 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-config\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:02 crc kubenswrapper[5002]: I1203 17:56:02.893434 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mrtz\" (UniqueName: \"kubernetes.io/projected/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-kube-api-access-2mrtz\") pod \"dnsmasq-dns-f79bf7859-nxvrw\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:03 crc kubenswrapper[5002]: I1203 17:56:03.030527 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:03 crc kubenswrapper[5002]: W1203 17:56:03.254372 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a34a01e_9a6b_4b0f_bfbf_a2af8859558e.slice/crio-b9f11ce461e1b81a25d97a6eaba0e72c05d6b491da45fae194f80a3b81074f75 WatchSource:0}: Error finding container b9f11ce461e1b81a25d97a6eaba0e72c05d6b491da45fae194f80a3b81074f75: Status 404 returned error can't find the container with id b9f11ce461e1b81a25d97a6eaba0e72c05d6b491da45fae194f80a3b81074f75 Dec 03 17:56:03 crc kubenswrapper[5002]: I1203 17:56:03.256629 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f79bf7859-nxvrw"] Dec 03 17:56:03 crc kubenswrapper[5002]: I1203 17:56:03.378850 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:56:03 crc kubenswrapper[5002]: I1203 17:56:03.633895 5002 generic.go:334] "Generic (PLEG): container finished" podID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerID="85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e" exitCode=0 Dec 03 17:56:03 crc kubenswrapper[5002]: I1203 17:56:03.633948 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" event={"ID":"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e","Type":"ContainerDied","Data":"85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e"} Dec 03 17:56:03 crc kubenswrapper[5002]: I1203 17:56:03.633985 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" event={"ID":"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e","Type":"ContainerStarted","Data":"b9f11ce461e1b81a25d97a6eaba0e72c05d6b491da45fae194f80a3b81074f75"} Dec 03 17:56:04 crc kubenswrapper[5002]: I1203 17:56:04.177000 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:56:04 crc kubenswrapper[5002]: I1203 17:56:04.640985 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" event={"ID":"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e","Type":"ContainerStarted","Data":"c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b"} Dec 03 17:56:04 crc kubenswrapper[5002]: I1203 17:56:04.641859 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:07 crc kubenswrapper[5002]: I1203 17:56:07.363431 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="rabbitmq" containerID="cri-o://50dd6e5b6c8791eb9952b8ee23bc43523d2c1200ab318d4a4201384f751d15e3" gracePeriod=604797 Dec 03 17:56:07 crc kubenswrapper[5002]: I1203 17:56:07.988481 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="rabbitmq" containerID="cri-o://09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4" gracePeriod=604797 Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.031959 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.052367 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" podStartSLOduration=6.05235069 
podStartE2EDuration="6.05235069s" podCreationTimestamp="2025-12-03 17:56:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:56:04.684686639 +0000 UTC m=+5088.098508517" watchObservedRunningTime="2025-12-03 17:56:08.05235069 +0000 UTC m=+5091.466172578" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.093510 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.240:5671: connect: connection refused" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.100790 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cbb4f659c-wtqs6"] Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.101145 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerName="dnsmasq-dns" containerID="cri-o://b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4" gracePeriod=10 Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.499340 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.241:5671: connect: connection refused" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.500550 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.670068 5002 generic.go:334] "Generic (PLEG): container finished" podID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerID="b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4" exitCode=0 Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.670108 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" event={"ID":"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2","Type":"ContainerDied","Data":"b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4"} Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.670133 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" event={"ID":"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2","Type":"ContainerDied","Data":"b87ad890eea3a2592ee79ffcab64b1d9a7143b60b4397eb38090bb3194032d1d"} Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.670148 5002 scope.go:117] "RemoveContainer" containerID="b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.670256 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cbb4f659c-wtqs6" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.681974 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfltf\" (UniqueName: \"kubernetes.io/projected/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-kube-api-access-gfltf\") pod \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.682137 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-config\") pod \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.682201 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-dns-svc\") pod \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\" (UID: \"64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2\") " Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.687215 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-kube-api-access-gfltf" (OuterVolumeSpecName: "kube-api-access-gfltf") pod "64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" (UID: "64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2"). InnerVolumeSpecName "kube-api-access-gfltf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.691842 5002 scope.go:117] "RemoveContainer" containerID="2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.718470 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" (UID: "64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.720128 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-config" (OuterVolumeSpecName: "config") pod "64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" (UID: "64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.771686 5002 scope.go:117] "RemoveContainer" containerID="b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4" Dec 03 17:56:08 crc kubenswrapper[5002]: E1203 17:56:08.772150 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4\": container with ID starting with b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4 not found: ID does not exist" containerID="b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.772188 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4"} err="failed to get container status \"b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4\": rpc error: code = NotFound desc = could not find container \"b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4\": container with ID starting with b48d10f7134ed93a27a465087a03cbec229d38e6e7303f76c29318c48e88ded4 not found: ID does not exist" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.772206 5002 scope.go:117] "RemoveContainer" containerID="2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311" Dec 03 17:56:08 crc kubenswrapper[5002]: E1203 17:56:08.772552 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311\": container with ID starting with 2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311 not found: ID does not exist" containerID="2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.773097 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311"} err="failed to get container status \"2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311\": rpc error: code = NotFound desc = could not find container \"2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311\": container with ID starting with 2c70368d03d85e0331f7df6bd8d9de35364d0e28ca07007938fc0035c1d28311 not found: ID does not exist" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.783719 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-config\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.783764 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:08 crc kubenswrapper[5002]: I1203 17:56:08.783777 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfltf\" (UniqueName: \"kubernetes.io/projected/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2-kube-api-access-gfltf\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:09 crc kubenswrapper[5002]: I1203 17:56:09.011213 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cbb4f659c-wtqs6"] Dec 03 17:56:09 crc kubenswrapper[5002]: I1203 17:56:09.018593 5002 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cbb4f659c-wtqs6"] Dec 03 17:56:10 crc kubenswrapper[5002]: I1203 17:56:10.848698 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" path="/var/lib/kubelet/pods/64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2/volumes" Dec 03 17:56:13 crc kubenswrapper[5002]: I1203 17:56:13.714108 5002 generic.go:334] "Generic (PLEG): container finished" podID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerID="50dd6e5b6c8791eb9952b8ee23bc43523d2c1200ab318d4a4201384f751d15e3" exitCode=0 Dec 03 17:56:13 crc kubenswrapper[5002]: I1203 17:56:13.714339 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cea0e0f-b554-4cf2-a4d9-16d5487260a2","Type":"ContainerDied","Data":"50dd6e5b6c8791eb9952b8ee23bc43523d2c1200ab318d4a4201384f751d15e3"} Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.237090 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371419 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-tls\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371483 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-plugins\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371600 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-erlang-cookie\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371791 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371821 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-pod-info\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371855 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-plugins-conf\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371873 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-confd\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: 
\"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371905 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9jlw\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-kube-api-access-n9jlw\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371930 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-erlang-cookie-secret\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371947 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-config-data\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.371970 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-server-conf\") pod \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\" (UID: \"6cea0e0f-b554-4cf2-a4d9-16d5487260a2\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.372088 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.372129 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.375168 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.375187 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.378236 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.378421 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.379993 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-pod-info" (OuterVolumeSpecName: "pod-info") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.385629 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-kube-api-access-n9jlw" (OuterVolumeSpecName: "kube-api-access-n9jlw") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "kube-api-access-n9jlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.406405 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-config-data" (OuterVolumeSpecName: "config-data") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.410442 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e" (OuterVolumeSpecName: "persistence") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.421393 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.446395 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-server-conf" (OuterVolumeSpecName: "server-conf") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.471305 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "6cea0e0f-b554-4cf2-a4d9-16d5487260a2" (UID: "6cea0e0f-b554-4cf2-a4d9-16d5487260a2"). 
InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476230 5002 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476263 5002 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476272 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476285 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9jlw\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-kube-api-access-n9jlw\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476296 5002 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476305 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476315 5002 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476326 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cea0e0f-b554-4cf2-a4d9-16d5487260a2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.476367 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") on node \"crc\" " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.513333 5002 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.513547 5002 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e") on node "crc" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.522416 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577486 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-plugins\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577558 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/44133afa-0a34-417e-b163-44535b0e3b49-pod-info\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577599 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-plugins-conf\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577651 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-confd\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577702 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5q4gt\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-kube-api-access-5q4gt\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577737 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-server-conf\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577782 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-erlang-cookie\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577819 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-tls\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577962 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.577980 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-config-data\") pod 
\"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.578001 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/44133afa-0a34-417e-b163-44535b0e3b49-erlang-cookie-secret\") pod \"44133afa-0a34-417e-b163-44535b0e3b49\" (UID: \"44133afa-0a34-417e-b163-44535b0e3b49\") " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.578243 5002 reconciler_common.go:293] "Volume detached for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.580528 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.580813 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.580847 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.583567 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/44133afa-0a34-417e-b163-44535b0e3b49-pod-info" (OuterVolumeSpecName: "pod-info") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.591308 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-kube-api-access-5q4gt" (OuterVolumeSpecName: "kube-api-access-5q4gt") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "kube-api-access-5q4gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.591340 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44133afa-0a34-417e-b163-44535b0e3b49-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.599444 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe" (OuterVolumeSpecName: "persistence") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "pvc-419ddde7-b92a-4146-a319-b7ef603d21fe". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.599666 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-config-data" (OuterVolumeSpecName: "config-data") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.599819 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.616369 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-server-conf" (OuterVolumeSpecName: "server-conf") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.655594 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "44133afa-0a34-417e-b163-44535b0e3b49" (UID: "44133afa-0a34-417e-b163-44535b0e3b49"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.678946 5002 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/44133afa-0a34-417e-b163-44535b0e3b49-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679157 5002 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679218 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679294 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5q4gt\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-kube-api-access-5q4gt\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679351 5002 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679406 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679468 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679556 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") on node \"crc\" " Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679617 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/44133afa-0a34-417e-b163-44535b0e3b49-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679683 5002 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/44133afa-0a34-417e-b163-44535b0e3b49-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.679769 5002 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/44133afa-0a34-417e-b163-44535b0e3b49-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.693142 5002 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.693292 5002 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-419ddde7-b92a-4146-a319-b7ef603d21fe" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe") on node "crc" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.724084 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cea0e0f-b554-4cf2-a4d9-16d5487260a2","Type":"ContainerDied","Data":"4ba521b9bcde4c3e73ae48c279d4a624ed4f75b1cba92c08a8e8ca5332b46518"} Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.725042 5002 scope.go:117] "RemoveContainer" containerID="50dd6e5b6c8791eb9952b8ee23bc43523d2c1200ab318d4a4201384f751d15e3" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.724342 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.726274 5002 generic.go:334] "Generic (PLEG): container finished" podID="44133afa-0a34-417e-b163-44535b0e3b49" containerID="09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4" exitCode=0 Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.726315 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"44133afa-0a34-417e-b163-44535b0e3b49","Type":"ContainerDied","Data":"09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4"} Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.726342 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"44133afa-0a34-417e-b163-44535b0e3b49","Type":"ContainerDied","Data":"57f6c9f27f919ef708200452d52b3e81f7647eb84656d7dfdb5d326f0f836b8b"} Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.726395 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.781325 5002 reconciler_common.go:293] "Volume detached for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") on node \"crc\" DevicePath \"\"" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.794653 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.801132 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.803214 5002 scope.go:117] "RemoveContainer" containerID="e25d45c21395ae142b3d335bdd21be02cc18a464dbc055cc873496ca7d90c52c" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.808073 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.821135 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.836658 5002 scope.go:117] "RemoveContainer" containerID="09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837484 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.837872 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="setup-container" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837891 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="setup-container" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.837900 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerName="init" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837906 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerName="init" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.837927 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="setup-container" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837934 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="setup-container" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.837950 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="rabbitmq" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837956 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="rabbitmq" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.837974 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="rabbitmq" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837980 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="rabbitmq" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.837989 5002 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerName="dnsmasq-dns" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.837996 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerName="dnsmasq-dns" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.838143 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="64f1ab3d-2baf-494e-9ba7-6ccbc39a29a2" containerName="dnsmasq-dns" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.838158 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" containerName="rabbitmq" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.838165 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="44133afa-0a34-417e-b163-44535b0e3b49" containerName="rabbitmq" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.839008 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.842405 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.842535 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.842840 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.842919 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.844860 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.844873 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-64z69" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.845119 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.858482 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44133afa-0a34-417e-b163-44535b0e3b49" path="/var/lib/kubelet/pods/44133afa-0a34-417e-b163-44535b0e3b49/volumes" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.859313 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cea0e0f-b554-4cf2-a4d9-16d5487260a2" path="/var/lib/kubelet/pods/6cea0e0f-b554-4cf2-a4d9-16d5487260a2/volumes" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.859935 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.863045 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.863150 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.865115 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.865355 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.867809 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.868014 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jfhll" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.868291 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.870097 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.870479 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.871952 5002 scope.go:117] "RemoveContainer" containerID="eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.875281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884170 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/08191975-de54-4c2b-9776-d9b9a82211c5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884248 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6dw9\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-kube-api-access-d6dw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884578 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884610 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/08191975-de54-4c2b-9776-d9b9a82211c5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884723 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884768 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884896 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.884973 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.885007 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.885053 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.909772 5002 scope.go:117] "RemoveContainer" containerID="09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.910157 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4\": container with ID starting with 09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4 not found: ID does not exist" containerID="09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.910190 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4"} err="failed to get container status \"09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4\": rpc error: code = NotFound desc = could not find container 
\"09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4\": container with ID starting with 09e2c3aaf07f50634a4d85b6915d9b35e6af4d62a8b3393af99eb983f23e38d4 not found: ID does not exist" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.910209 5002 scope.go:117] "RemoveContainer" containerID="eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48" Dec 03 17:56:14 crc kubenswrapper[5002]: E1203 17:56:14.910455 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48\": container with ID starting with eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48 not found: ID does not exist" containerID="eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.910499 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48"} err="failed to get container status \"eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48\": rpc error: code = NotFound desc = could not find container \"eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48\": container with ID starting with eb86557f898f104f5f31be5b3d9d63171d440c2f0e03deb3f2b58c889d02fd48 not found: ID does not exist" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.986977 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987096 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/08191975-de54-4c2b-9776-d9b9a82211c5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987132 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6dw9\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-kube-api-access-d6dw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987175 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4dade6f-e9f1-416a-a766-12f292375b21-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987214 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj2hz\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-kube-api-access-rj2hz\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987258 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4dade6f-e9f1-416a-a766-12f292375b21-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987449 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987528 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987560 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/08191975-de54-4c2b-9776-d9b9a82211c5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987585 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987605 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-config-data\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987631 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987667 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987702 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987806 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987827 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987860 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987905 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987949 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.987990 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.988011 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.988402 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.989076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.989346 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.989921 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.990095 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08191975-de54-4c2b-9776-d9b9a82211c5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.990615 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.990686 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f9842c43a86c944e2b3df50386d4b6ebe4493964e33fb144fba5d58ddbd0a6d7/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.991253 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/08191975-de54-4c2b-9776-d9b9a82211c5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.992211 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.992352 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:14 crc kubenswrapper[5002]: I1203 17:56:14.994655 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/08191975-de54-4c2b-9776-d9b9a82211c5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.005724 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6dw9\" (UniqueName: \"kubernetes.io/projected/08191975-de54-4c2b-9776-d9b9a82211c5-kube-api-access-d6dw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.021392 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-419ddde7-b92a-4146-a319-b7ef603d21fe\") pod \"rabbitmq-cell1-server-0\" (UID: \"08191975-de54-4c2b-9776-d9b9a82211c5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.088871 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.088918 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.088945 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.088988 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4dade6f-e9f1-416a-a766-12f292375b21-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089012 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj2hz\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-kube-api-access-rj2hz\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089027 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4dade6f-e9f1-416a-a766-12f292375b21-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089053 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089078 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089102 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-config-data\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089124 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089142 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089373 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.089770 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.090851 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-config-data\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.091214 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.091228 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.091268 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/af4a5dc127f6ade3b95cda6b38edc9b33be7dc25ffeedb37e4032a4859a29303/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.091667 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4dade6f-e9f1-416a-a766-12f292375b21-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.093139 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4dade6f-e9f1-416a-a766-12f292375b21-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.093584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4dade6f-e9f1-416a-a766-12f292375b21-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.094192 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.094905 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.113859 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj2hz\" (UniqueName: \"kubernetes.io/projected/c4dade6f-e9f1-416a-a766-12f292375b21-kube-api-access-rj2hz\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.137013 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f6867ae3-b34c-43bd-9cae-0e5e4355464e\") pod \"rabbitmq-server-0\" (UID: \"c4dade6f-e9f1-416a-a766-12f292375b21\") " pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 
17:56:15.167274 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.190721 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.462345 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.493312 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 17:56:15 crc kubenswrapper[5002]: W1203 17:56:15.497398 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4dade6f_e9f1_416a_a766_12f292375b21.slice/crio-2166d02b750fe6e576267ded8c82199c0b86493ca536c64c6192f928883ed736 WatchSource:0}: Error finding container 2166d02b750fe6e576267ded8c82199c0b86493ca536c64c6192f928883ed736: Status 404 returned error can't find the container with id 2166d02b750fe6e576267ded8c82199c0b86493ca536c64c6192f928883ed736 Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.734485 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c4dade6f-e9f1-416a-a766-12f292375b21","Type":"ContainerStarted","Data":"2166d02b750fe6e576267ded8c82199c0b86493ca536c64c6192f928883ed736"} Dec 03 17:56:15 crc kubenswrapper[5002]: I1203 17:56:15.736280 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"08191975-de54-4c2b-9776-d9b9a82211c5","Type":"ContainerStarted","Data":"695abb1957bcb2efa227f9b20aa48f9a759f91bbb5b6bc53934efec391a3a66d"} Dec 03 17:56:17 crc kubenswrapper[5002]: I1203 17:56:17.755163 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"08191975-de54-4c2b-9776-d9b9a82211c5","Type":"ContainerStarted","Data":"62792f1413c420b6f45a5ce6ab96dac6bd7063bfb9304d9c7bb0d2523b2bcee2"} Dec 03 17:56:17 crc kubenswrapper[5002]: I1203 17:56:17.757467 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c4dade6f-e9f1-416a-a766-12f292375b21","Type":"ContainerStarted","Data":"dd2a274f36a81a2527f6c36c0875b611e1781d625aa6eeab65b86e3fffae3acb"} Dec 03 17:56:50 crc kubenswrapper[5002]: I1203 17:56:50.031625 5002 generic.go:334] "Generic (PLEG): container finished" podID="08191975-de54-4c2b-9776-d9b9a82211c5" containerID="62792f1413c420b6f45a5ce6ab96dac6bd7063bfb9304d9c7bb0d2523b2bcee2" exitCode=0 Dec 03 17:56:50 crc kubenswrapper[5002]: I1203 17:56:50.031697 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"08191975-de54-4c2b-9776-d9b9a82211c5","Type":"ContainerDied","Data":"62792f1413c420b6f45a5ce6ab96dac6bd7063bfb9304d9c7bb0d2523b2bcee2"} Dec 03 17:56:50 crc kubenswrapper[5002]: I1203 17:56:50.037091 5002 generic.go:334] "Generic (PLEG): container finished" podID="c4dade6f-e9f1-416a-a766-12f292375b21" containerID="dd2a274f36a81a2527f6c36c0875b611e1781d625aa6eeab65b86e3fffae3acb" exitCode=0 Dec 03 17:56:50 crc kubenswrapper[5002]: I1203 17:56:50.037145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c4dade6f-e9f1-416a-a766-12f292375b21","Type":"ContainerDied","Data":"dd2a274f36a81a2527f6c36c0875b611e1781d625aa6eeab65b86e3fffae3acb"} Dec 03 17:56:51 crc kubenswrapper[5002]: I1203 
17:56:51.045536 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c4dade6f-e9f1-416a-a766-12f292375b21","Type":"ContainerStarted","Data":"26ed97fb847b5cb455b87c063d3ec457ac269aacec3fd63d1fc2795a5269d8d9"} Dec 03 17:56:51 crc kubenswrapper[5002]: I1203 17:56:51.046954 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 17:56:51 crc kubenswrapper[5002]: I1203 17:56:51.049036 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"08191975-de54-4c2b-9776-d9b9a82211c5","Type":"ContainerStarted","Data":"28dea580bf9c89234824cd6b1513258c80eb71672a5a8f57e965365afa576aeb"} Dec 03 17:56:51 crc kubenswrapper[5002]: I1203 17:56:51.049580 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:56:51 crc kubenswrapper[5002]: I1203 17:56:51.069557 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.069534895 podStartE2EDuration="37.069534895s" podCreationTimestamp="2025-12-03 17:56:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:56:51.066845892 +0000 UTC m=+5134.480667810" watchObservedRunningTime="2025-12-03 17:56:51.069534895 +0000 UTC m=+5134.483356783" Dec 03 17:56:51 crc kubenswrapper[5002]: I1203 17:56:51.091138 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.091120416 podStartE2EDuration="37.091120416s" podCreationTimestamp="2025-12-03 17:56:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:56:51.084479957 +0000 UTC m=+5134.498301845" watchObservedRunningTime="2025-12-03 17:56:51.091120416 +0000 UTC m=+5134.504942304" Dec 03 17:57:05 crc kubenswrapper[5002]: I1203 17:57:05.171139 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 17:57:05 crc kubenswrapper[5002]: I1203 17:57:05.195203 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.242376 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.244507 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.248728 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vk84c" Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.259197 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.394160 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6sz2\" (UniqueName: \"kubernetes.io/projected/65db70e8-0626-4dff-a8d8-e33948b9b9ec-kube-api-access-l6sz2\") pod \"mariadb-client-1-default\" (UID: \"65db70e8-0626-4dff-a8d8-e33948b9b9ec\") " pod="openstack/mariadb-client-1-default" Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.495779 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6sz2\" (UniqueName: \"kubernetes.io/projected/65db70e8-0626-4dff-a8d8-e33948b9b9ec-kube-api-access-l6sz2\") pod \"mariadb-client-1-default\" (UID: \"65db70e8-0626-4dff-a8d8-e33948b9b9ec\") " pod="openstack/mariadb-client-1-default" Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.529152 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6sz2\" (UniqueName: \"kubernetes.io/projected/65db70e8-0626-4dff-a8d8-e33948b9b9ec-kube-api-access-l6sz2\") pod \"mariadb-client-1-default\" (UID: \"65db70e8-0626-4dff-a8d8-e33948b9b9ec\") " pod="openstack/mariadb-client-1-default" Dec 03 17:57:10 crc kubenswrapper[5002]: I1203 17:57:10.571044 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 03 17:57:11 crc kubenswrapper[5002]: I1203 17:57:11.092319 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 03 17:57:11 crc kubenswrapper[5002]: I1203 17:57:11.106542 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:57:11 crc kubenswrapper[5002]: I1203 17:57:11.208336 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"65db70e8-0626-4dff-a8d8-e33948b9b9ec","Type":"ContainerStarted","Data":"c36bffa83d3507b607e2f8c63d59816a91775d782194857a22d1e86205205d0a"} Dec 03 17:57:12 crc kubenswrapper[5002]: I1203 17:57:12.216943 5002 generic.go:334] "Generic (PLEG): container finished" podID="65db70e8-0626-4dff-a8d8-e33948b9b9ec" containerID="2ac1ba3163a6ab8315ff5c1742b68a316e2db2e06c4031534b4a211041ba7d42" exitCode=0 Dec 03 17:57:12 crc kubenswrapper[5002]: I1203 17:57:12.217074 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"65db70e8-0626-4dff-a8d8-e33948b9b9ec","Type":"ContainerDied","Data":"2ac1ba3163a6ab8315ff5c1742b68a316e2db2e06c4031534b4a211041ba7d42"} Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.593496 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.624278 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_65db70e8-0626-4dff-a8d8-e33948b9b9ec/mariadb-client-1-default/0.log" Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.650469 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.657610 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.751986 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6sz2\" (UniqueName: \"kubernetes.io/projected/65db70e8-0626-4dff-a8d8-e33948b9b9ec-kube-api-access-l6sz2\") pod \"65db70e8-0626-4dff-a8d8-e33948b9b9ec\" (UID: \"65db70e8-0626-4dff-a8d8-e33948b9b9ec\") " Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.757918 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65db70e8-0626-4dff-a8d8-e33948b9b9ec-kube-api-access-l6sz2" (OuterVolumeSpecName: "kube-api-access-l6sz2") pod "65db70e8-0626-4dff-a8d8-e33948b9b9ec" (UID: "65db70e8-0626-4dff-a8d8-e33948b9b9ec"). InnerVolumeSpecName "kube-api-access-l6sz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:57:13 crc kubenswrapper[5002]: I1203 17:57:13.854430 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6sz2\" (UniqueName: \"kubernetes.io/projected/65db70e8-0626-4dff-a8d8-e33948b9b9ec-kube-api-access-l6sz2\") on node \"crc\" DevicePath \"\"" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.105685 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Dec 03 17:57:14 crc kubenswrapper[5002]: E1203 17:57:14.106036 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65db70e8-0626-4dff-a8d8-e33948b9b9ec" containerName="mariadb-client-1-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.106049 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="65db70e8-0626-4dff-a8d8-e33948b9b9ec" containerName="mariadb-client-1-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.106210 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="65db70e8-0626-4dff-a8d8-e33948b9b9ec" containerName="mariadb-client-1-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.106758 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.117994 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.234117 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c36bffa83d3507b607e2f8c63d59816a91775d782194857a22d1e86205205d0a" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.234201 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.259327 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/012cdcf2-2036-463f-bbdb-77c831606bb0-kube-api-access-kvf7j\") pod \"mariadb-client-2-default\" (UID: \"012cdcf2-2036-463f-bbdb-77c831606bb0\") " pod="openstack/mariadb-client-2-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.360440 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/012cdcf2-2036-463f-bbdb-77c831606bb0-kube-api-access-kvf7j\") pod \"mariadb-client-2-default\" (UID: \"012cdcf2-2036-463f-bbdb-77c831606bb0\") " pod="openstack/mariadb-client-2-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.382260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/012cdcf2-2036-463f-bbdb-77c831606bb0-kube-api-access-kvf7j\") pod \"mariadb-client-2-default\" (UID: \"012cdcf2-2036-463f-bbdb-77c831606bb0\") " pod="openstack/mariadb-client-2-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.430910 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.854402 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65db70e8-0626-4dff-a8d8-e33948b9b9ec" path="/var/lib/kubelet/pods/65db70e8-0626-4dff-a8d8-e33948b9b9ec/volumes" Dec 03 17:57:14 crc kubenswrapper[5002]: I1203 17:57:14.925785 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 03 17:57:14 crc kubenswrapper[5002]: W1203 17:57:14.935120 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod012cdcf2_2036_463f_bbdb_77c831606bb0.slice/crio-1ca8c4ce74b8b7287709f53f85f0fa78aa749982b0491c9b1fb4bd039c4d906a WatchSource:0}: Error finding container 1ca8c4ce74b8b7287709f53f85f0fa78aa749982b0491c9b1fb4bd039c4d906a: Status 404 returned error can't find the container with id 1ca8c4ce74b8b7287709f53f85f0fa78aa749982b0491c9b1fb4bd039c4d906a Dec 03 17:57:15 crc kubenswrapper[5002]: I1203 17:57:15.241898 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"012cdcf2-2036-463f-bbdb-77c831606bb0","Type":"ContainerStarted","Data":"73574fec0559724b7855921401f25bf483b23da9a758d8cc8b2e1e2788edd7ce"} Dec 03 17:57:15 crc kubenswrapper[5002]: I1203 17:57:15.241943 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"012cdcf2-2036-463f-bbdb-77c831606bb0","Type":"ContainerStarted","Data":"1ca8c4ce74b8b7287709f53f85f0fa78aa749982b0491c9b1fb4bd039c4d906a"} Dec 03 17:57:15 crc kubenswrapper[5002]: I1203 17:57:15.260156 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=1.260134032 podStartE2EDuration="1.260134032s" podCreationTimestamp="2025-12-03 17:57:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:57:15.254104189 +0000 UTC m=+5158.667926077" watchObservedRunningTime="2025-12-03 17:57:15.260134032 
+0000 UTC m=+5158.673955920" Dec 03 17:57:16 crc kubenswrapper[5002]: I1203 17:57:16.252916 5002 generic.go:334] "Generic (PLEG): container finished" podID="012cdcf2-2036-463f-bbdb-77c831606bb0" containerID="73574fec0559724b7855921401f25bf483b23da9a758d8cc8b2e1e2788edd7ce" exitCode=1 Dec 03 17:57:16 crc kubenswrapper[5002]: I1203 17:57:16.253043 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"012cdcf2-2036-463f-bbdb-77c831606bb0","Type":"ContainerDied","Data":"73574fec0559724b7855921401f25bf483b23da9a758d8cc8b2e1e2788edd7ce"} Dec 03 17:57:17 crc kubenswrapper[5002]: I1203 17:57:17.619220 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 03 17:57:17 crc kubenswrapper[5002]: I1203 17:57:17.664018 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 03 17:57:17 crc kubenswrapper[5002]: I1203 17:57:17.669389 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 03 17:57:17 crc kubenswrapper[5002]: I1203 17:57:17.716315 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/012cdcf2-2036-463f-bbdb-77c831606bb0-kube-api-access-kvf7j\") pod \"012cdcf2-2036-463f-bbdb-77c831606bb0\" (UID: \"012cdcf2-2036-463f-bbdb-77c831606bb0\") " Dec 03 17:57:17 crc kubenswrapper[5002]: I1203 17:57:17.723275 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/012cdcf2-2036-463f-bbdb-77c831606bb0-kube-api-access-kvf7j" (OuterVolumeSpecName: "kube-api-access-kvf7j") pod "012cdcf2-2036-463f-bbdb-77c831606bb0" (UID: "012cdcf2-2036-463f-bbdb-77c831606bb0"). InnerVolumeSpecName "kube-api-access-kvf7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:57:17 crc kubenswrapper[5002]: I1203 17:57:17.819003 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/012cdcf2-2036-463f-bbdb-77c831606bb0-kube-api-access-kvf7j\") on node \"crc\" DevicePath \"\"" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.066018 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Dec 03 17:57:18 crc kubenswrapper[5002]: E1203 17:57:18.066620 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="012cdcf2-2036-463f-bbdb-77c831606bb0" containerName="mariadb-client-2-default" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.066641 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="012cdcf2-2036-463f-bbdb-77c831606bb0" containerName="mariadb-client-2-default" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.066847 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="012cdcf2-2036-463f-bbdb-77c831606bb0" containerName="mariadb-client-2-default" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.067373 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.076411 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.230538 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98hm6\" (UniqueName: \"kubernetes.io/projected/25bc9035-dc92-4e3a-a326-040a2b505574-kube-api-access-98hm6\") pod \"mariadb-client-1\" (UID: \"25bc9035-dc92-4e3a-a326-040a2b505574\") " pod="openstack/mariadb-client-1" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.277620 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ca8c4ce74b8b7287709f53f85f0fa78aa749982b0491c9b1fb4bd039c4d906a" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.277661 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.332713 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98hm6\" (UniqueName: \"kubernetes.io/projected/25bc9035-dc92-4e3a-a326-040a2b505574-kube-api-access-98hm6\") pod \"mariadb-client-1\" (UID: \"25bc9035-dc92-4e3a-a326-040a2b505574\") " pod="openstack/mariadb-client-1" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.356874 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98hm6\" (UniqueName: \"kubernetes.io/projected/25bc9035-dc92-4e3a-a326-040a2b505574-kube-api-access-98hm6\") pod \"mariadb-client-1\" (UID: \"25bc9035-dc92-4e3a-a326-040a2b505574\") " pod="openstack/mariadb-client-1" Dec 03 17:57:18 crc kubenswrapper[5002]: I1203 17:57:18.396853 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Dec 03 17:57:19 crc kubenswrapper[5002]: I1203 17:57:18.850711 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="012cdcf2-2036-463f-bbdb-77c831606bb0" path="/var/lib/kubelet/pods/012cdcf2-2036-463f-bbdb-77c831606bb0/volumes" Dec 03 17:57:19 crc kubenswrapper[5002]: I1203 17:57:19.637410 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Dec 03 17:57:20 crc kubenswrapper[5002]: I1203 17:57:20.292273 5002 generic.go:334] "Generic (PLEG): container finished" podID="25bc9035-dc92-4e3a-a326-040a2b505574" containerID="5fab71267d08749ccd0eb51b2e4d13d376055a5d826daf8bd3b316ab390e0345" exitCode=0 Dec 03 17:57:20 crc kubenswrapper[5002]: I1203 17:57:20.292386 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"25bc9035-dc92-4e3a-a326-040a2b505574","Type":"ContainerDied","Data":"5fab71267d08749ccd0eb51b2e4d13d376055a5d826daf8bd3b316ab390e0345"} Dec 03 17:57:20 crc kubenswrapper[5002]: I1203 17:57:20.292712 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"25bc9035-dc92-4e3a-a326-040a2b505574","Type":"ContainerStarted","Data":"cbf15bc64c6608a1ca74972988e60a92925141b2c322620f6a10babfb40ebd94"} Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.697679 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.729632 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_25bc9035-dc92-4e3a-a326-040a2b505574/mariadb-client-1/0.log" Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.753857 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.758446 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.785431 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98hm6\" (UniqueName: \"kubernetes.io/projected/25bc9035-dc92-4e3a-a326-040a2b505574-kube-api-access-98hm6\") pod \"25bc9035-dc92-4e3a-a326-040a2b505574\" (UID: \"25bc9035-dc92-4e3a-a326-040a2b505574\") " Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.804019 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25bc9035-dc92-4e3a-a326-040a2b505574-kube-api-access-98hm6" (OuterVolumeSpecName: "kube-api-access-98hm6") pod "25bc9035-dc92-4e3a-a326-040a2b505574" (UID: "25bc9035-dc92-4e3a-a326-040a2b505574"). InnerVolumeSpecName "kube-api-access-98hm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:57:21 crc kubenswrapper[5002]: I1203 17:57:21.887598 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98hm6\" (UniqueName: \"kubernetes.io/projected/25bc9035-dc92-4e3a-a326-040a2b505574-kube-api-access-98hm6\") on node \"crc\" DevicePath \"\"" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.203249 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Dec 03 17:57:22 crc kubenswrapper[5002]: E1203 17:57:22.203630 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25bc9035-dc92-4e3a-a326-040a2b505574" containerName="mariadb-client-1" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.203653 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="25bc9035-dc92-4e3a-a326-040a2b505574" containerName="mariadb-client-1" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.203923 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="25bc9035-dc92-4e3a-a326-040a2b505574" containerName="mariadb-client-1" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.204580 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.209422 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.294154 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmmpc\" (UniqueName: \"kubernetes.io/projected/332e5ff8-b658-4062-ad77-a301be51fd0c-kube-api-access-dmmpc\") pod \"mariadb-client-4-default\" (UID: \"332e5ff8-b658-4062-ad77-a301be51fd0c\") " pod="openstack/mariadb-client-4-default" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.311653 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbf15bc64c6608a1ca74972988e60a92925141b2c322620f6a10babfb40ebd94" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.311729 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.396242 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmmpc\" (UniqueName: \"kubernetes.io/projected/332e5ff8-b658-4062-ad77-a301be51fd0c-kube-api-access-dmmpc\") pod \"mariadb-client-4-default\" (UID: \"332e5ff8-b658-4062-ad77-a301be51fd0c\") " pod="openstack/mariadb-client-4-default" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.411275 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmmpc\" (UniqueName: \"kubernetes.io/projected/332e5ff8-b658-4062-ad77-a301be51fd0c-kube-api-access-dmmpc\") pod \"mariadb-client-4-default\" (UID: \"332e5ff8-b658-4062-ad77-a301be51fd0c\") " pod="openstack/mariadb-client-4-default" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.529074 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 03 17:57:22 crc kubenswrapper[5002]: I1203 17:57:22.856647 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25bc9035-dc92-4e3a-a326-040a2b505574" path="/var/lib/kubelet/pods/25bc9035-dc92-4e3a-a326-040a2b505574/volumes" Dec 03 17:57:23 crc kubenswrapper[5002]: I1203 17:57:23.050963 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 03 17:57:23 crc kubenswrapper[5002]: W1203 17:57:23.058933 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod332e5ff8_b658_4062_ad77_a301be51fd0c.slice/crio-b317d26e9c43b6bfd05e38d4361e2bdcdbac9ab82ed36e17b6d6ab22e8a5a0ea WatchSource:0}: Error finding container b317d26e9c43b6bfd05e38d4361e2bdcdbac9ab82ed36e17b6d6ab22e8a5a0ea: Status 404 returned error can't find the container with id b317d26e9c43b6bfd05e38d4361e2bdcdbac9ab82ed36e17b6d6ab22e8a5a0ea Dec 03 17:57:23 crc kubenswrapper[5002]: I1203 17:57:23.319891 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"332e5ff8-b658-4062-ad77-a301be51fd0c","Type":"ContainerStarted","Data":"b317d26e9c43b6bfd05e38d4361e2bdcdbac9ab82ed36e17b6d6ab22e8a5a0ea"} Dec 03 17:57:24 crc kubenswrapper[5002]: I1203 17:57:24.340279 5002 generic.go:334] "Generic (PLEG): container finished" podID="332e5ff8-b658-4062-ad77-a301be51fd0c" containerID="f69e97e7cca483b1f1e2d0d7bd7fb2bafc4e541164ce4ba45ed423c1bfd7b867" exitCode=0 Dec 03 17:57:24 crc kubenswrapper[5002]: I1203 17:57:24.340339 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"332e5ff8-b658-4062-ad77-a301be51fd0c","Type":"ContainerDied","Data":"f69e97e7cca483b1f1e2d0d7bd7fb2bafc4e541164ce4ba45ed423c1bfd7b867"} Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.682975 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.699597 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_332e5ff8-b658-4062-ad77-a301be51fd0c/mariadb-client-4-default/0.log" Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.727177 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.732915 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.849867 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmmpc\" (UniqueName: \"kubernetes.io/projected/332e5ff8-b658-4062-ad77-a301be51fd0c-kube-api-access-dmmpc\") pod \"332e5ff8-b658-4062-ad77-a301be51fd0c\" (UID: \"332e5ff8-b658-4062-ad77-a301be51fd0c\") " Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.858982 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/332e5ff8-b658-4062-ad77-a301be51fd0c-kube-api-access-dmmpc" (OuterVolumeSpecName: "kube-api-access-dmmpc") pod "332e5ff8-b658-4062-ad77-a301be51fd0c" (UID: "332e5ff8-b658-4062-ad77-a301be51fd0c"). InnerVolumeSpecName "kube-api-access-dmmpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:57:25 crc kubenswrapper[5002]: I1203 17:57:25.951772 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmmpc\" (UniqueName: \"kubernetes.io/projected/332e5ff8-b658-4062-ad77-a301be51fd0c-kube-api-access-dmmpc\") on node \"crc\" DevicePath \"\"" Dec 03 17:57:26 crc kubenswrapper[5002]: I1203 17:57:26.361159 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b317d26e9c43b6bfd05e38d4361e2bdcdbac9ab82ed36e17b6d6ab22e8a5a0ea" Dec 03 17:57:26 crc kubenswrapper[5002]: I1203 17:57:26.361299 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 03 17:57:26 crc kubenswrapper[5002]: I1203 17:57:26.853295 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="332e5ff8-b658-4062-ad77-a301be51fd0c" path="/var/lib/kubelet/pods/332e5ff8-b658-4062-ad77-a301be51fd0c/volumes" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.462834 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Dec 03 17:57:29 crc kubenswrapper[5002]: E1203 17:57:29.463554 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="332e5ff8-b658-4062-ad77-a301be51fd0c" containerName="mariadb-client-4-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.463567 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="332e5ff8-b658-4062-ad77-a301be51fd0c" containerName="mariadb-client-4-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.463725 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="332e5ff8-b658-4062-ad77-a301be51fd0c" containerName="mariadb-client-4-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.464195 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.469883 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vk84c" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.486318 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.554552 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnlzh\" (UniqueName: \"kubernetes.io/projected/a2f20231-2d60-4624-a46b-2e2aee9dbc28-kube-api-access-lnlzh\") pod \"mariadb-client-5-default\" (UID: \"a2f20231-2d60-4624-a46b-2e2aee9dbc28\") " pod="openstack/mariadb-client-5-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.655399 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnlzh\" (UniqueName: \"kubernetes.io/projected/a2f20231-2d60-4624-a46b-2e2aee9dbc28-kube-api-access-lnlzh\") pod \"mariadb-client-5-default\" (UID: \"a2f20231-2d60-4624-a46b-2e2aee9dbc28\") " pod="openstack/mariadb-client-5-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.683856 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnlzh\" (UniqueName: \"kubernetes.io/projected/a2f20231-2d60-4624-a46b-2e2aee9dbc28-kube-api-access-lnlzh\") pod \"mariadb-client-5-default\" (UID: \"a2f20231-2d60-4624-a46b-2e2aee9dbc28\") " pod="openstack/mariadb-client-5-default" Dec 03 17:57:29 crc kubenswrapper[5002]: I1203 17:57:29.826188 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 03 17:57:30 crc kubenswrapper[5002]: I1203 17:57:30.323290 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 03 17:57:30 crc kubenswrapper[5002]: I1203 17:57:30.433350 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"a2f20231-2d60-4624-a46b-2e2aee9dbc28","Type":"ContainerStarted","Data":"b1604b7ba46fc4740bd27c3da684223e652ac4d7507bab1055614a98a7580455"} Dec 03 17:57:31 crc kubenswrapper[5002]: I1203 17:57:31.441610 5002 generic.go:334] "Generic (PLEG): container finished" podID="a2f20231-2d60-4624-a46b-2e2aee9dbc28" containerID="bdf7c0980e78cc578e85804716aa6ae7b31dbbc98ab902ce185b016f67a2e836" exitCode=0 Dec 03 17:57:31 crc kubenswrapper[5002]: I1203 17:57:31.441660 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"a2f20231-2d60-4624-a46b-2e2aee9dbc28","Type":"ContainerDied","Data":"bdf7c0980e78cc578e85804716aa6ae7b31dbbc98ab902ce185b016f67a2e836"} Dec 03 17:57:32 crc kubenswrapper[5002]: I1203 17:57:32.939767 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 03 17:57:32 crc kubenswrapper[5002]: I1203 17:57:32.957319 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_a2f20231-2d60-4624-a46b-2e2aee9dbc28/mariadb-client-5-default/0.log" Dec 03 17:57:32 crc kubenswrapper[5002]: I1203 17:57:32.983351 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 03 17:57:32 crc kubenswrapper[5002]: I1203 17:57:32.989515 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.108481 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnlzh\" (UniqueName: \"kubernetes.io/projected/a2f20231-2d60-4624-a46b-2e2aee9dbc28-kube-api-access-lnlzh\") pod \"a2f20231-2d60-4624-a46b-2e2aee9dbc28\" (UID: \"a2f20231-2d60-4624-a46b-2e2aee9dbc28\") " Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.116421 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2f20231-2d60-4624-a46b-2e2aee9dbc28-kube-api-access-lnlzh" (OuterVolumeSpecName: "kube-api-access-lnlzh") pod "a2f20231-2d60-4624-a46b-2e2aee9dbc28" (UID: "a2f20231-2d60-4624-a46b-2e2aee9dbc28"). InnerVolumeSpecName "kube-api-access-lnlzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.129945 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Dec 03 17:57:33 crc kubenswrapper[5002]: E1203 17:57:33.130430 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2f20231-2d60-4624-a46b-2e2aee9dbc28" containerName="mariadb-client-5-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.130457 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2f20231-2d60-4624-a46b-2e2aee9dbc28" containerName="mariadb-client-5-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.130689 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2f20231-2d60-4624-a46b-2e2aee9dbc28" containerName="mariadb-client-5-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.131349 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.144915 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.211293 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnlzh\" (UniqueName: \"kubernetes.io/projected/a2f20231-2d60-4624-a46b-2e2aee9dbc28-kube-api-access-lnlzh\") on node \"crc\" DevicePath \"\"" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.312559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br85v\" (UniqueName: \"kubernetes.io/projected/84a83524-6ff9-4d2a-a90b-c2cb0db02623-kube-api-access-br85v\") pod \"mariadb-client-6-default\" (UID: \"84a83524-6ff9-4d2a-a90b-c2cb0db02623\") " pod="openstack/mariadb-client-6-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.414060 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br85v\" (UniqueName: \"kubernetes.io/projected/84a83524-6ff9-4d2a-a90b-c2cb0db02623-kube-api-access-br85v\") pod \"mariadb-client-6-default\" (UID: \"84a83524-6ff9-4d2a-a90b-c2cb0db02623\") " pod="openstack/mariadb-client-6-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.431439 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br85v\" (UniqueName: \"kubernetes.io/projected/84a83524-6ff9-4d2a-a90b-c2cb0db02623-kube-api-access-br85v\") pod \"mariadb-client-6-default\" (UID: \"84a83524-6ff9-4d2a-a90b-c2cb0db02623\") " pod="openstack/mariadb-client-6-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.460336 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1604b7ba46fc4740bd27c3da684223e652ac4d7507bab1055614a98a7580455" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.460417 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 03 17:57:33 crc kubenswrapper[5002]: I1203 17:57:33.463425 5002 util.go:30] "No sandbox for pod can be found. 
Dec 03 17:57:34 crc kubenswrapper[5002]: I1203 17:57:34.276817 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"]
Dec 03 17:57:34 crc kubenswrapper[5002]: W1203 17:57:34.282828 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84a83524_6ff9_4d2a_a90b_c2cb0db02623.slice/crio-cb61aaae185f142069c69429dd402b64737c84c417ed5d52f2f36d5cac8f24d5 WatchSource:0}: Error finding container cb61aaae185f142069c69429dd402b64737c84c417ed5d52f2f36d5cac8f24d5: Status 404 returned error can't find the container with id cb61aaae185f142069c69429dd402b64737c84c417ed5d52f2f36d5cac8f24d5
Dec 03 17:57:34 crc kubenswrapper[5002]: I1203 17:57:34.470363 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"84a83524-6ff9-4d2a-a90b-c2cb0db02623","Type":"ContainerStarted","Data":"cb61aaae185f142069c69429dd402b64737c84c417ed5d52f2f36d5cac8f24d5"}
Dec 03 17:57:34 crc kubenswrapper[5002]: I1203 17:57:34.848822 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2f20231-2d60-4624-a46b-2e2aee9dbc28" path="/var/lib/kubelet/pods/a2f20231-2d60-4624-a46b-2e2aee9dbc28/volumes"
Dec 03 17:57:35 crc kubenswrapper[5002]: I1203 17:57:35.479821 5002 generic.go:334] "Generic (PLEG): container finished" podID="84a83524-6ff9-4d2a-a90b-c2cb0db02623" containerID="6270943a8ab633596e0efde623b5328e9a7af8774de52a69dbb6ccd69815ed40" exitCode=1
Dec 03 17:57:35 crc kubenswrapper[5002]: I1203 17:57:35.479884 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"84a83524-6ff9-4d2a-a90b-c2cb0db02623","Type":"ContainerDied","Data":"6270943a8ab633596e0efde623b5328e9a7af8774de52a69dbb6ccd69815ed40"}
Dec 03 17:57:36 crc kubenswrapper[5002]: I1203 17:57:36.964740 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default"
Dec 03 17:57:36 crc kubenswrapper[5002]: I1203 17:57:36.989460 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_84a83524-6ff9-4d2a-a90b-c2cb0db02623/mariadb-client-6-default/0.log"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.030835 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"]
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.042640 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"]
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.069870 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br85v\" (UniqueName: \"kubernetes.io/projected/84a83524-6ff9-4d2a-a90b-c2cb0db02623-kube-api-access-br85v\") pod \"84a83524-6ff9-4d2a-a90b-c2cb0db02623\" (UID: \"84a83524-6ff9-4d2a-a90b-c2cb0db02623\") "
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.076203 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a83524-6ff9-4d2a-a90b-c2cb0db02623-kube-api-access-br85v" (OuterVolumeSpecName: "kube-api-access-br85v") pod "84a83524-6ff9-4d2a-a90b-c2cb0db02623" (UID: "84a83524-6ff9-4d2a-a90b-c2cb0db02623"). InnerVolumeSpecName "kube-api-access-br85v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.150145 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"]
Dec 03 17:57:37 crc kubenswrapper[5002]: E1203 17:57:37.150527 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a83524-6ff9-4d2a-a90b-c2cb0db02623" containerName="mariadb-client-6-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.150547 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a83524-6ff9-4d2a-a90b-c2cb0db02623" containerName="mariadb-client-6-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.150732 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a83524-6ff9-4d2a-a90b-c2cb0db02623" containerName="mariadb-client-6-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.151347 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.159162 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"]
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.171740 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br85v\" (UniqueName: \"kubernetes.io/projected/84a83524-6ff9-4d2a-a90b-c2cb0db02623-kube-api-access-br85v\") on node \"crc\" DevicePath \"\""
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.272878 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzvk9\" (UniqueName: \"kubernetes.io/projected/999ab877-8acc-41fa-9957-5786adde75a6-kube-api-access-rzvk9\") pod \"mariadb-client-7-default\" (UID: \"999ab877-8acc-41fa-9957-5786adde75a6\") " pod="openstack/mariadb-client-7-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.375185 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzvk9\" (UniqueName: \"kubernetes.io/projected/999ab877-8acc-41fa-9957-5786adde75a6-kube-api-access-rzvk9\") pod \"mariadb-client-7-default\" (UID: \"999ab877-8acc-41fa-9957-5786adde75a6\") " pod="openstack/mariadb-client-7-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.394577 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzvk9\" (UniqueName: \"kubernetes.io/projected/999ab877-8acc-41fa-9957-5786adde75a6-kube-api-access-rzvk9\") pod \"mariadb-client-7-default\" (UID: \"999ab877-8acc-41fa-9957-5786adde75a6\") " pod="openstack/mariadb-client-7-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.465617 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.497979 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb61aaae185f142069c69429dd402b64737c84c417ed5d52f2f36d5cac8f24d5"
Dec 03 17:57:37 crc kubenswrapper[5002]: I1203 17:57:37.498028 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default"
Dec 03 17:57:38 crc kubenswrapper[5002]: I1203 17:57:38.031170 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"]
Dec 03 17:57:38 crc kubenswrapper[5002]: W1203 17:57:38.033135 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod999ab877_8acc_41fa_9957_5786adde75a6.slice/crio-300c3b9b6fea3b34aa00c5c9a9be3ebab072a4827960f7dad528938972eb6428 WatchSource:0}: Error finding container 300c3b9b6fea3b34aa00c5c9a9be3ebab072a4827960f7dad528938972eb6428: Status 404 returned error can't find the container with id 300c3b9b6fea3b34aa00c5c9a9be3ebab072a4827960f7dad528938972eb6428
Dec 03 17:57:38 crc kubenswrapper[5002]: I1203 17:57:38.506296 5002 generic.go:334] "Generic (PLEG): container finished" podID="999ab877-8acc-41fa-9957-5786adde75a6" containerID="38ea96ee4013ce1f9ebe3eff1a886f4146f4be30b9ddb3981021ab3593cabd4e" exitCode=0
Dec 03 17:57:38 crc kubenswrapper[5002]: I1203 17:57:38.506458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"999ab877-8acc-41fa-9957-5786adde75a6","Type":"ContainerDied","Data":"38ea96ee4013ce1f9ebe3eff1a886f4146f4be30b9ddb3981021ab3593cabd4e"}
Dec 03 17:57:38 crc kubenswrapper[5002]: I1203 17:57:38.506679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"999ab877-8acc-41fa-9957-5786adde75a6","Type":"ContainerStarted","Data":"300c3b9b6fea3b34aa00c5c9a9be3ebab072a4827960f7dad528938972eb6428"}
Dec 03 17:57:38 crc kubenswrapper[5002]: I1203 17:57:38.849370 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a83524-6ff9-4d2a-a90b-c2cb0db02623" path="/var/lib/kubelet/pods/84a83524-6ff9-4d2a-a90b-c2cb0db02623/volumes"
Dec 03 17:57:39 crc kubenswrapper[5002]: I1203 17:57:39.884100 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Dec 03 17:57:39 crc kubenswrapper[5002]: I1203 17:57:39.911331 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_999ab877-8acc-41fa-9957-5786adde75a6/mariadb-client-7-default/0.log"
Dec 03 17:57:39 crc kubenswrapper[5002]: I1203 17:57:39.925424 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzvk9\" (UniqueName: \"kubernetes.io/projected/999ab877-8acc-41fa-9957-5786adde75a6-kube-api-access-rzvk9\") pod \"999ab877-8acc-41fa-9957-5786adde75a6\" (UID: \"999ab877-8acc-41fa-9957-5786adde75a6\") "
Dec 03 17:57:39 crc kubenswrapper[5002]: I1203 17:57:39.946198 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"]
Dec 03 17:57:39 crc kubenswrapper[5002]: I1203 17:57:39.946415 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/999ab877-8acc-41fa-9957-5786adde75a6-kube-api-access-rzvk9" (OuterVolumeSpecName: "kube-api-access-rzvk9") pod "999ab877-8acc-41fa-9957-5786adde75a6" (UID: "999ab877-8acc-41fa-9957-5786adde75a6"). InnerVolumeSpecName "kube-api-access-rzvk9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:57:39 crc kubenswrapper[5002]: I1203 17:57:39.953833 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"]
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.027010 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzvk9\" (UniqueName: \"kubernetes.io/projected/999ab877-8acc-41fa-9957-5786adde75a6-kube-api-access-rzvk9\") on node \"crc\" DevicePath \"\""
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.067529 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"]
Dec 03 17:57:40 crc kubenswrapper[5002]: E1203 17:57:40.068486 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="999ab877-8acc-41fa-9957-5786adde75a6" containerName="mariadb-client-7-default"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.068510 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="999ab877-8acc-41fa-9957-5786adde75a6" containerName="mariadb-client-7-default"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.068659 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="999ab877-8acc-41fa-9957-5786adde75a6" containerName="mariadb-client-7-default"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.069279 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.076281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"]
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.230491 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8668d\" (UniqueName: \"kubernetes.io/projected/a657d33d-a626-48a9-bb06-fc633cca6f4b-kube-api-access-8668d\") pod \"mariadb-client-2\" (UID: \"a657d33d-a626-48a9-bb06-fc633cca6f4b\") " pod="openstack/mariadb-client-2"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.332343 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8668d\" (UniqueName: \"kubernetes.io/projected/a657d33d-a626-48a9-bb06-fc633cca6f4b-kube-api-access-8668d\") pod \"mariadb-client-2\" (UID: \"a657d33d-a626-48a9-bb06-fc633cca6f4b\") " pod="openstack/mariadb-client-2"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.355925 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8668d\" (UniqueName: \"kubernetes.io/projected/a657d33d-a626-48a9-bb06-fc633cca6f4b-kube-api-access-8668d\") pod \"mariadb-client-2\" (UID: \"a657d33d-a626-48a9-bb06-fc633cca6f4b\") " pod="openstack/mariadb-client-2"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.390345 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.526101 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="300c3b9b6fea3b34aa00c5c9a9be3ebab072a4827960f7dad528938972eb6428"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.526191 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.854051 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="999ab877-8acc-41fa-9957-5786adde75a6" path="/var/lib/kubelet/pods/999ab877-8acc-41fa-9957-5786adde75a6/volumes"
Dec 03 17:57:40 crc kubenswrapper[5002]: I1203 17:57:40.956771 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"]
Dec 03 17:57:41 crc kubenswrapper[5002]: I1203 17:57:41.538954 5002 generic.go:334] "Generic (PLEG): container finished" podID="a657d33d-a626-48a9-bb06-fc633cca6f4b" containerID="6bff7622be464a398ae2990bf6dde7f63430fa018017eaadd417e8f76b73bdfd" exitCode=0
Dec 03 17:57:41 crc kubenswrapper[5002]: I1203 17:57:41.539046 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"a657d33d-a626-48a9-bb06-fc633cca6f4b","Type":"ContainerDied","Data":"6bff7622be464a398ae2990bf6dde7f63430fa018017eaadd417e8f76b73bdfd"}
Dec 03 17:57:41 crc kubenswrapper[5002]: I1203 17:57:41.539252 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"a657d33d-a626-48a9-bb06-fc633cca6f4b","Type":"ContainerStarted","Data":"d33af1c64868456e2e5f818219604045496eb503877fd39c0fcd2d98587a256f"}
Dec 03 17:57:42 crc kubenswrapper[5002]: I1203 17:57:42.934981 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Dec 03 17:57:42 crc kubenswrapper[5002]: I1203 17:57:42.960983 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_a657d33d-a626-48a9-bb06-fc633cca6f4b/mariadb-client-2/0.log"
Dec 03 17:57:42 crc kubenswrapper[5002]: I1203 17:57:42.987270 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"]
Dec 03 17:57:42 crc kubenswrapper[5002]: I1203 17:57:42.995174 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"]
Dec 03 17:57:43 crc kubenswrapper[5002]: I1203 17:57:43.071233 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8668d\" (UniqueName: \"kubernetes.io/projected/a657d33d-a626-48a9-bb06-fc633cca6f4b-kube-api-access-8668d\") pod \"a657d33d-a626-48a9-bb06-fc633cca6f4b\" (UID: \"a657d33d-a626-48a9-bb06-fc633cca6f4b\") "
Dec 03 17:57:43 crc kubenswrapper[5002]: I1203 17:57:43.076821 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a657d33d-a626-48a9-bb06-fc633cca6f4b-kube-api-access-8668d" (OuterVolumeSpecName: "kube-api-access-8668d") pod "a657d33d-a626-48a9-bb06-fc633cca6f4b" (UID: "a657d33d-a626-48a9-bb06-fc633cca6f4b"). InnerVolumeSpecName "kube-api-access-8668d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:57:43 crc kubenswrapper[5002]: I1203 17:57:43.173982 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8668d\" (UniqueName: \"kubernetes.io/projected/a657d33d-a626-48a9-bb06-fc633cca6f4b-kube-api-access-8668d\") on node \"crc\" DevicePath \"\""
Dec 03 17:57:43 crc kubenswrapper[5002]: I1203 17:57:43.555932 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d33af1c64868456e2e5f818219604045496eb503877fd39c0fcd2d98587a256f"
Dec 03 17:57:43 crc kubenswrapper[5002]: I1203 17:57:43.555968 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Dec 03 17:57:44 crc kubenswrapper[5002]: I1203 17:57:44.851387 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a657d33d-a626-48a9-bb06-fc633cca6f4b" path="/var/lib/kubelet/pods/a657d33d-a626-48a9-bb06-fc633cca6f4b/volumes"
Dec 03 17:58:20 crc kubenswrapper[5002]: I1203 17:58:20.917340 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:58:20 crc kubenswrapper[5002]: I1203 17:58:20.918227 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.591927 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fzw6l"]
Dec 03 17:58:44 crc kubenswrapper[5002]: E1203 17:58:44.594128 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a657d33d-a626-48a9-bb06-fc633cca6f4b" containerName="mariadb-client-2"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.594345 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a657d33d-a626-48a9-bb06-fc633cca6f4b" containerName="mariadb-client-2"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.594616 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a657d33d-a626-48a9-bb06-fc633cca6f4b" containerName="mariadb-client-2"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.598735 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzw6l"]
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.598979 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.615792 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gr2d\" (UniqueName: \"kubernetes.io/projected/0656130a-949a-4b83-b7a3-635dc211baab-kube-api-access-8gr2d\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.615850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-catalog-content\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.615914 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-utilities\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.717370 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gr2d\" (UniqueName: \"kubernetes.io/projected/0656130a-949a-4b83-b7a3-635dc211baab-kube-api-access-8gr2d\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.717783 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-catalog-content\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.717942 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-utilities\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.718318 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-catalog-content\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.718324 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-utilities\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.735936 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gr2d\" (UniqueName: \"kubernetes.io/projected/0656130a-949a-4b83-b7a3-635dc211baab-kube-api-access-8gr2d\") pod \"redhat-marketplace-fzw6l\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") " pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:44 crc kubenswrapper[5002]: I1203 17:58:44.918137 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:45 crc kubenswrapper[5002]: I1203 17:58:45.360247 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzw6l"]
Dec 03 17:58:46 crc kubenswrapper[5002]: I1203 17:58:46.097957 5002 generic.go:334] "Generic (PLEG): container finished" podID="0656130a-949a-4b83-b7a3-635dc211baab" containerID="7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b" exitCode=0
Dec 03 17:58:46 crc kubenswrapper[5002]: I1203 17:58:46.098123 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerDied","Data":"7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b"}
Dec 03 17:58:46 crc kubenswrapper[5002]: I1203 17:58:46.098320 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerStarted","Data":"ae6f65ff5b61d1cb96fdeca5184567d5a35a1841d32874dab04c5450637b2fc6"}
Dec 03 17:58:47 crc kubenswrapper[5002]: I1203 17:58:47.113713 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerStarted","Data":"9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321"}
Dec 03 17:58:48 crc kubenswrapper[5002]: I1203 17:58:48.127220 5002 generic.go:334] "Generic (PLEG): container finished" podID="0656130a-949a-4b83-b7a3-635dc211baab" containerID="9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321" exitCode=0
Dec 03 17:58:48 crc kubenswrapper[5002]: I1203 17:58:48.127274 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerDied","Data":"9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321"}
Dec 03 17:58:48 crc kubenswrapper[5002]: I1203 17:58:48.127307 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerStarted","Data":"ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214"}
Dec 03 17:58:48 crc kubenswrapper[5002]: I1203 17:58:48.158478 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fzw6l" podStartSLOduration=2.744032567 podStartE2EDuration="4.158445973s" podCreationTimestamp="2025-12-03 17:58:44 +0000 UTC" firstStartedPulling="2025-12-03 17:58:46.102085034 +0000 UTC m=+5249.515906922" lastFinishedPulling="2025-12-03 17:58:47.5164984 +0000 UTC m=+5250.930320328" observedRunningTime="2025-12-03 17:58:48.152536503 +0000 UTC m=+5251.566358391" watchObservedRunningTime="2025-12-03 17:58:48.158445973 +0000 UTC m=+5251.572267901"
Dec 03 17:58:50 crc kubenswrapper[5002]: I1203 17:58:50.916981 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:58:50 crc kubenswrapper[5002]: I1203 17:58:50.917594 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:58:54 crc kubenswrapper[5002]: I1203 17:58:54.918994 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:54 crc kubenswrapper[5002]: I1203 17:58:54.919343 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:54 crc kubenswrapper[5002]: I1203 17:58:54.996363 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:55 crc kubenswrapper[5002]: I1203 17:58:55.240448 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:55 crc kubenswrapper[5002]: I1203 17:58:55.294111 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzw6l"]
Dec 03 17:58:57 crc kubenswrapper[5002]: I1203 17:58:57.204150 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fzw6l" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="registry-server" containerID="cri-o://ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214" gracePeriod=2
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.145932 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.218254 5002 generic.go:334] "Generic (PLEG): container finished" podID="0656130a-949a-4b83-b7a3-635dc211baab" containerID="ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214" exitCode=0
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.218578 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerDied","Data":"ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214"}
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.218630 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzw6l" event={"ID":"0656130a-949a-4b83-b7a3-635dc211baab","Type":"ContainerDied","Data":"ae6f65ff5b61d1cb96fdeca5184567d5a35a1841d32874dab04c5450637b2fc6"}
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.218660 5002 scope.go:117] "RemoveContainer" containerID="ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.218885 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzw6l"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.246210 5002 scope.go:117] "RemoveContainer" containerID="9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.270595 5002 scope.go:117] "RemoveContainer" containerID="7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.313680 5002 scope.go:117] "RemoveContainer" containerID="ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214"
Dec 03 17:58:58 crc kubenswrapper[5002]: E1203 17:58:58.314322 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214\": container with ID starting with ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214 not found: ID does not exist" containerID="ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.314385 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214"} err="failed to get container status \"ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214\": rpc error: code = NotFound desc = could not find container \"ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214\": container with ID starting with ee643e1451f10f6211f0988af60a87fef8bfa72e48189b8d4259df18affe4214 not found: ID does not exist"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.314425 5002 scope.go:117] "RemoveContainer" containerID="9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321"
Dec 03 17:58:58 crc kubenswrapper[5002]: E1203 17:58:58.315008 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321\": container with ID starting with 9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321 not found: ID does not exist" containerID="9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.315072 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321"} err="failed to get container status \"9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321\": rpc error: code = NotFound desc = could not find container \"9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321\": container with ID starting with 9999dfe0a92e494b2407c1f0a621da336915d07eaf867a48689f0c7bc8d7a321 not found: ID does not exist"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.315107 5002 scope.go:117] "RemoveContainer" containerID="7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b"
Dec 03 17:58:58 crc kubenswrapper[5002]: E1203 17:58:58.315455 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b\": container with ID starting with 7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b not found: ID does not exist" containerID="7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.315488 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b"} err="failed to get container status \"7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b\": rpc error: code = NotFound desc = could not find container \"7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b\": container with ID starting with 7461c943307297b7303780fb26fa0ad556f89592405f814e3ed6a2138994c55b not found: ID does not exist"
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.347154 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-catalog-content\") pod \"0656130a-949a-4b83-b7a3-635dc211baab\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") "
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.347267 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gr2d\" (UniqueName: \"kubernetes.io/projected/0656130a-949a-4b83-b7a3-635dc211baab-kube-api-access-8gr2d\") pod \"0656130a-949a-4b83-b7a3-635dc211baab\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") "
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.347348 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-utilities\") pod \"0656130a-949a-4b83-b7a3-635dc211baab\" (UID: \"0656130a-949a-4b83-b7a3-635dc211baab\") "
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.348435 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-utilities" (OuterVolumeSpecName: "utilities") pod "0656130a-949a-4b83-b7a3-635dc211baab" (UID: "0656130a-949a-4b83-b7a3-635dc211baab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.356086 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0656130a-949a-4b83-b7a3-635dc211baab-kube-api-access-8gr2d" (OuterVolumeSpecName: "kube-api-access-8gr2d") pod "0656130a-949a-4b83-b7a3-635dc211baab" (UID: "0656130a-949a-4b83-b7a3-635dc211baab"). InnerVolumeSpecName "kube-api-access-8gr2d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.387842 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0656130a-949a-4b83-b7a3-635dc211baab" (UID: "0656130a-949a-4b83-b7a3-635dc211baab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.450475 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gr2d\" (UniqueName: \"kubernetes.io/projected/0656130a-949a-4b83-b7a3-635dc211baab-kube-api-access-8gr2d\") on node \"crc\" DevicePath \"\""
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.450528 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.450549 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0656130a-949a-4b83-b7a3-635dc211baab-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.575393 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzw6l"]
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.581575 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzw6l"]
Dec 03 17:58:58 crc kubenswrapper[5002]: I1203 17:58:58.857270 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0656130a-949a-4b83-b7a3-635dc211baab" path="/var/lib/kubelet/pods/0656130a-949a-4b83-b7a3-635dc211baab/volumes"
Dec 03 17:59:20 crc kubenswrapper[5002]: I1203 17:59:20.916894 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:59:20 crc kubenswrapper[5002]: I1203 17:59:20.918087 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:59:20 crc kubenswrapper[5002]: I1203 17:59:20.918237 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f"
Dec 03 17:59:20 crc kubenswrapper[5002]: I1203 17:59:20.919025 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 17:59:20 crc kubenswrapper[5002]: I1203 17:59:20.919183 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" gracePeriod=600
Dec 03 17:59:21 crc kubenswrapper[5002]: E1203 17:59:21.048495 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:59:21 crc kubenswrapper[5002]: I1203 17:59:21.430564 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" exitCode=0
Dec 03 17:59:21 crc kubenswrapper[5002]: I1203 17:59:21.430631 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"}
Dec 03 17:59:21 crc kubenswrapper[5002]: I1203 17:59:21.430702 5002 scope.go:117] "RemoveContainer" containerID="94e292ef11c6c843fa462b5f23f7bcb46c279bfc5926f3fae23ecbd8649fdcdd"
Dec 03 17:59:21 crc kubenswrapper[5002]: I1203 17:59:21.431825 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 17:59:21 crc kubenswrapper[5002]: E1203 17:59:21.432213 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:59:32 crc kubenswrapper[5002]: I1203 17:59:32.840790 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 17:59:32 crc kubenswrapper[5002]: E1203 17:59:32.841491 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:59:41 crc kubenswrapper[5002]: I1203 17:59:41.180449 5002 scope.go:117] "RemoveContainer" containerID="61ae4fcf06dfb03b5adea70a8c9eb57451b48449823d1c03a22e4fe8c5233eb2"
Dec 03 17:59:41 crc kubenswrapper[5002]: I1203 17:59:41.204853 5002 scope.go:117] "RemoveContainer" containerID="e389fb6d345d20c7d84e3b2df1b4c57b04d483a48593175064a76d66aaa28abe"
Dec 03 17:59:41 crc kubenswrapper[5002]: I1203 17:59:41.227452 5002 scope.go:117] "RemoveContainer" containerID="0dca195e6d697d02830c48ff47b16f77583284457f280469d9e62c68088c4e93"
Dec 03 17:59:41 crc kubenswrapper[5002]: I1203 17:59:41.263475 5002 scope.go:117] "RemoveContainer" containerID="572eb31c1495153c3ab571b1dac9b13428288146b5efe31669f72e18d5b62af1"
Dec 03 17:59:43 crc kubenswrapper[5002]: I1203 17:59:43.840619 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 17:59:43 crc kubenswrapper[5002]: E1203 17:59:43.841411 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 17:59:56 crc kubenswrapper[5002]: I1203 17:59:56.847528 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 17:59:56 crc kubenswrapper[5002]: E1203 17:59:56.848229 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.145088 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"]
Dec 03 18:00:00 crc kubenswrapper[5002]: E1203 18:00:00.145635 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="registry-server"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.145647 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="registry-server"
Dec 03 18:00:00 crc kubenswrapper[5002]: E1203 18:00:00.145659 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="extract-content"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.145664 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="extract-content"
Dec 03 18:00:00 crc kubenswrapper[5002]: E1203 18:00:00.145897 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="extract-utilities"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.145904 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="extract-utilities"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.146060 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0656130a-949a-4b83-b7a3-635dc211baab" containerName="registry-server"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.146620 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.150098 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.150098 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.159267 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"]
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.339521 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2675815e-6913-42b9-a7fa-1167c6625302-secret-volume\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.339594 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp59s\" (UniqueName: \"kubernetes.io/projected/2675815e-6913-42b9-a7fa-1167c6625302-kube-api-access-bp59s\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.339700 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2675815e-6913-42b9-a7fa-1167c6625302-config-volume\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.441026 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2675815e-6913-42b9-a7fa-1167c6625302-config-volume\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.441254 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2675815e-6913-42b9-a7fa-1167c6625302-secret-volume\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.441316 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp59s\" (UniqueName: \"kubernetes.io/projected/2675815e-6913-42b9-a7fa-1167c6625302-kube-api-access-bp59s\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.443384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2675815e-6913-42b9-a7fa-1167c6625302-config-volume\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.451002 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2675815e-6913-42b9-a7fa-1167c6625302-secret-volume\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.459019 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp59s\" (UniqueName: \"kubernetes.io/projected/2675815e-6913-42b9-a7fa-1167c6625302-kube-api-access-bp59s\") pod \"collect-profiles-29413080-tp25b\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.471264 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.696586 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"]
Dec 03 18:00:00 crc kubenswrapper[5002]: I1203 18:00:00.764810 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b" event={"ID":"2675815e-6913-42b9-a7fa-1167c6625302","Type":"ContainerStarted","Data":"41d097fc055ed0cd9658f456f82c746500e40889483562747b2ddb56f4dda9c7"}
Dec 03 18:00:01 crc kubenswrapper[5002]: I1203 18:00:01.774275 5002 generic.go:334] "Generic (PLEG): container finished" podID="2675815e-6913-42b9-a7fa-1167c6625302" containerID="7572c9ee21b2af547fa6efb2b710779f386ce0451994cb835862731f8d685c52" exitCode=0
Dec 03 18:00:01 crc kubenswrapper[5002]: I1203 18:00:01.774386 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b" event={"ID":"2675815e-6913-42b9-a7fa-1167c6625302","Type":"ContainerDied","Data":"7572c9ee21b2af547fa6efb2b710779f386ce0451994cb835862731f8d685c52"}
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.038656 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.179344 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp59s\" (UniqueName: \"kubernetes.io/projected/2675815e-6913-42b9-a7fa-1167c6625302-kube-api-access-bp59s\") pod \"2675815e-6913-42b9-a7fa-1167c6625302\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") "
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.179627 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2675815e-6913-42b9-a7fa-1167c6625302-secret-volume\") pod \"2675815e-6913-42b9-a7fa-1167c6625302\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") "
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.179709 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2675815e-6913-42b9-a7fa-1167c6625302-config-volume\") pod \"2675815e-6913-42b9-a7fa-1167c6625302\" (UID: \"2675815e-6913-42b9-a7fa-1167c6625302\") "
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.180630 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2675815e-6913-42b9-a7fa-1167c6625302-config-volume" (OuterVolumeSpecName: "config-volume") pod "2675815e-6913-42b9-a7fa-1167c6625302" (UID: "2675815e-6913-42b9-a7fa-1167c6625302"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.185636 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2675815e-6913-42b9-a7fa-1167c6625302-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2675815e-6913-42b9-a7fa-1167c6625302" (UID: "2675815e-6913-42b9-a7fa-1167c6625302"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.185820 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2675815e-6913-42b9-a7fa-1167c6625302-kube-api-access-bp59s" (OuterVolumeSpecName: "kube-api-access-bp59s") pod "2675815e-6913-42b9-a7fa-1167c6625302" (UID: "2675815e-6913-42b9-a7fa-1167c6625302"). InnerVolumeSpecName "kube-api-access-bp59s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.281066 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp59s\" (UniqueName: \"kubernetes.io/projected/2675815e-6913-42b9-a7fa-1167c6625302-kube-api-access-bp59s\") on node \"crc\" DevicePath \"\""
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.281119 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2675815e-6913-42b9-a7fa-1167c6625302-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.281142 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2675815e-6913-42b9-a7fa-1167c6625302-config-volume\") on node \"crc\" DevicePath \"\""
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.794289 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b" event={"ID":"2675815e-6913-42b9-a7fa-1167c6625302","Type":"ContainerDied","Data":"41d097fc055ed0cd9658f456f82c746500e40889483562747b2ddb56f4dda9c7"}
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.794348 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41d097fc055ed0cd9658f456f82c746500e40889483562747b2ddb56f4dda9c7"
Dec 03 18:00:03 crc kubenswrapper[5002]: I1203 18:00:03.794416 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413080-tp25b"
Dec 03 18:00:04 crc kubenswrapper[5002]: I1203 18:00:04.109588 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"]
Dec 03 18:00:04 crc kubenswrapper[5002]: I1203 18:00:04.117362 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-5lmtw"]
Dec 03 18:00:04 crc kubenswrapper[5002]: I1203 18:00:04.851954 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e5e5b3d-426f-4d32-9012-b281bf5bf8b6" path="/var/lib/kubelet/pods/9e5e5b3d-426f-4d32-9012-b281bf5bf8b6/volumes"
Dec 03 18:00:09 crc kubenswrapper[5002]: I1203 18:00:09.840941 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 18:00:09 crc kubenswrapper[5002]: E1203 18:00:09.841827 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:00:22 crc kubenswrapper[5002]: I1203 18:00:22.840674 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 18:00:22 crc kubenswrapper[5002]: E1203 18:00:22.842148 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\""
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:00:33 crc kubenswrapper[5002]: I1203 18:00:33.840562 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:00:33 crc kubenswrapper[5002]: E1203 18:00:33.841341 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:00:41 crc kubenswrapper[5002]: I1203 18:00:41.352461 5002 scope.go:117] "RemoveContainer" containerID="154a41ad0cbc51427ac65a9e61f147c65d0577f6d0a3ae81df45c7247d68a609" Dec 03 18:00:48 crc kubenswrapper[5002]: I1203 18:00:48.840127 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:00:48 crc kubenswrapper[5002]: E1203 18:00:48.840860 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:01:02 crc kubenswrapper[5002]: I1203 18:01:02.840524 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:01:02 crc kubenswrapper[5002]: E1203 18:01:02.841389 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:01:15 crc kubenswrapper[5002]: I1203 18:01:15.841520 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:01:15 crc kubenswrapper[5002]: E1203 18:01:15.842307 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.226720 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Dec 03 18:01:29 crc kubenswrapper[5002]: E1203 18:01:29.228730 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2675815e-6913-42b9-a7fa-1167c6625302" containerName="collect-profiles" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.228874 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2675815e-6913-42b9-a7fa-1167c6625302" containerName="collect-profiles" Dec 03 18:01:29 crc 
kubenswrapper[5002]: I1203 18:01:29.229138 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2675815e-6913-42b9-a7fa-1167c6625302" containerName="collect-profiles" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.230691 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.236140 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.241043 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vk84c" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.342802 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sst5w\" (UniqueName: \"kubernetes.io/projected/9e10bc3b-b369-48e1-981a-81b8f5286964-kube-api-access-sst5w\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") " pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.343115 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fee406f6-7042-4693-b32a-db6c89627663\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fee406f6-7042-4693-b32a-db6c89627663\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") " pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.447327 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sst5w\" (UniqueName: \"kubernetes.io/projected/9e10bc3b-b369-48e1-981a-81b8f5286964-kube-api-access-sst5w\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") " pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.447392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fee406f6-7042-4693-b32a-db6c89627663\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fee406f6-7042-4693-b32a-db6c89627663\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") " pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.454298 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
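The "attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice..." entry above, together with the "MountVolume.MountDevice succeeded ... device mount path .../globalmount" entry that follows, shows the kubelet probing the kubevirt.io.hostpath-provisioner node plugin before staging a volume: a CSI driver that does not advertise STAGE_UNSTAGE_VOLUME gets no NodeStageVolume call, so the kubelet just records the global mount path and moves straight to the per-pod NodePublishVolume ("MountVolume.SetUp succeeded"). A minimal sketch of the plugin side of that handshake, using the CSI spec's Go bindings; the package name, the hostpathNode type, and the stagingAdvertised helper are hypothetical illustrations, not code from the provisioner:

package csisketch

import (
	"context"

	csi "github.com/container-storage-interface/spec/lib/go/csi"
)

// hostpathNode stands in for a node plugin that, like the hostpath
// provisioner in this log, performs no separate staging step.
type hostpathNode struct{}

// NodeGetCapabilities returns an empty capability list. Because
// STAGE_UNSTAGE_VOLUME is absent, the kubelet skips NodeStageVolume
// (logged as "Skipping MountDevice...") and publishes the volume per
// pod via NodePublishVolume ("MountVolume.SetUp succeeded").
func (n *hostpathNode) NodeGetCapabilities(ctx context.Context, req *csi.NodeGetCapabilitiesRequest) (*csi.NodeGetCapabilitiesResponse, error) {
	return &csi.NodeGetCapabilitiesResponse{}, nil
}

// stagingAdvertised shows what a driver that does want a staging step
// would return instead; the kubelet would then call NodeStageVolume
// against the .../globalmount path recorded in the entries that follow.
func stagingAdvertised() *csi.NodeGetCapabilitiesResponse {
	return &csi.NodeGetCapabilitiesResponse{
		Capabilities: []*csi.NodeServiceCapability{{
			Type: &csi.NodeServiceCapability_Rpc{
				Rpc: &csi.NodeServiceCapability_RPC{
					Type: csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME,
				},
			},
		}},
	}
}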
Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.454362 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fee406f6-7042-4693-b32a-db6c89627663\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fee406f6-7042-4693-b32a-db6c89627663\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b540c1430e57491b3062f91aed2c943e799e08877ad7c778954fa79813744c26/globalmount\"" pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.473333 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sst5w\" (UniqueName: \"kubernetes.io/projected/9e10bc3b-b369-48e1-981a-81b8f5286964-kube-api-access-sst5w\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") " pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.489733 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fee406f6-7042-4693-b32a-db6c89627663\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fee406f6-7042-4693-b32a-db6c89627663\") pod \"mariadb-copy-data\" (UID: \"9e10bc3b-b369-48e1-981a-81b8f5286964\") " pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.555062 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 03 18:01:29 crc kubenswrapper[5002]: I1203 18:01:29.840349 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:01:29 crc kubenswrapper[5002]: E1203 18:01:29.840967 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:01:30 crc kubenswrapper[5002]: I1203 18:01:30.084661 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Dec 03 18:01:30 crc kubenswrapper[5002]: W1203 18:01:30.089466 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e10bc3b_b369_48e1_981a_81b8f5286964.slice/crio-fe466bcf3199f0ebd1410400ff34b36bad0b974b800fe283c4bab19f6ffcc1a0 WatchSource:0}: Error finding container fe466bcf3199f0ebd1410400ff34b36bad0b974b800fe283c4bab19f6ffcc1a0: Status 404 returned error can't find the container with id fe466bcf3199f0ebd1410400ff34b36bad0b974b800fe283c4bab19f6ffcc1a0 Dec 03 18:01:30 crc kubenswrapper[5002]: I1203 18:01:30.475152 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"9e10bc3b-b369-48e1-981a-81b8f5286964","Type":"ContainerStarted","Data":"02250ee5271daaa9a6e2587323f74d81bc32a005ba12bf6804758df7a308ab86"} Dec 03 18:01:30 crc kubenswrapper[5002]: I1203 18:01:30.475204 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"9e10bc3b-b369-48e1-981a-81b8f5286964","Type":"ContainerStarted","Data":"fe466bcf3199f0ebd1410400ff34b36bad0b974b800fe283c4bab19f6ffcc1a0"} Dec 03 18:01:30 crc kubenswrapper[5002]: I1203 
18:01:30.492727 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.492706594 podStartE2EDuration="2.492706594s" podCreationTimestamp="2025-12-03 18:01:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:01:30.488157922 +0000 UTC m=+5413.901979830" watchObservedRunningTime="2025-12-03 18:01:30.492706594 +0000 UTC m=+5413.906528502" Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.488096 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.489947 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.497410 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.610448 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7q2h\" (UniqueName: \"kubernetes.io/projected/dee7f62c-0cf0-403f-9df5-60d4dce314da-kube-api-access-c7q2h\") pod \"mariadb-client\" (UID: \"dee7f62c-0cf0-403f-9df5-60d4dce314da\") " pod="openstack/mariadb-client" Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.712024 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7q2h\" (UniqueName: \"kubernetes.io/projected/dee7f62c-0cf0-403f-9df5-60d4dce314da-kube-api-access-c7q2h\") pod \"mariadb-client\" (UID: \"dee7f62c-0cf0-403f-9df5-60d4dce314da\") " pod="openstack/mariadb-client" Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.736727 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7q2h\" (UniqueName: \"kubernetes.io/projected/dee7f62c-0cf0-403f-9df5-60d4dce314da-kube-api-access-c7q2h\") pod \"mariadb-client\" (UID: \"dee7f62c-0cf0-403f-9df5-60d4dce314da\") " pod="openstack/mariadb-client" Dec 03 18:01:33 crc kubenswrapper[5002]: I1203 18:01:33.818612 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:34 crc kubenswrapper[5002]: I1203 18:01:34.246120 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:34 crc kubenswrapper[5002]: I1203 18:01:34.509427 5002 generic.go:334] "Generic (PLEG): container finished" podID="dee7f62c-0cf0-403f-9df5-60d4dce314da" containerID="01f0925c64c38da0fbc7a703cf722fe7ab0d34672351d740b0a58f3875c9e8ed" exitCode=0 Dec 03 18:01:34 crc kubenswrapper[5002]: I1203 18:01:34.509487 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"dee7f62c-0cf0-403f-9df5-60d4dce314da","Type":"ContainerDied","Data":"01f0925c64c38da0fbc7a703cf722fe7ab0d34672351d740b0a58f3875c9e8ed"} Dec 03 18:01:34 crc kubenswrapper[5002]: I1203 18:01:34.509517 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"dee7f62c-0cf0-403f-9df5-60d4dce314da","Type":"ContainerStarted","Data":"c1986514a9076eaf40821077952cc179bb27fc60f97e3101afa267c28ef8dea6"} Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.815546 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.838191 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_dee7f62c-0cf0-403f-9df5-60d4dce314da/mariadb-client/0.log" Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.864412 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.872624 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.956521 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7q2h\" (UniqueName: \"kubernetes.io/projected/dee7f62c-0cf0-403f-9df5-60d4dce314da-kube-api-access-c7q2h\") pod \"dee7f62c-0cf0-403f-9df5-60d4dce314da\" (UID: \"dee7f62c-0cf0-403f-9df5-60d4dce314da\") " Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.962185 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dee7f62c-0cf0-403f-9df5-60d4dce314da-kube-api-access-c7q2h" (OuterVolumeSpecName: "kube-api-access-c7q2h") pod "dee7f62c-0cf0-403f-9df5-60d4dce314da" (UID: "dee7f62c-0cf0-403f-9df5-60d4dce314da"). InnerVolumeSpecName "kube-api-access-c7q2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.996452 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:35 crc kubenswrapper[5002]: E1203 18:01:35.998115 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee7f62c-0cf0-403f-9df5-60d4dce314da" containerName="mariadb-client" Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.998322 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee7f62c-0cf0-403f-9df5-60d4dce314da" containerName="mariadb-client" Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.998500 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="dee7f62c-0cf0-403f-9df5-60d4dce314da" containerName="mariadb-client" Dec 03 18:01:35 crc kubenswrapper[5002]: I1203 18:01:35.999173 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.002631 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.058611 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7q2h\" (UniqueName: \"kubernetes.io/projected/dee7f62c-0cf0-403f-9df5-60d4dce314da-kube-api-access-c7q2h\") on node \"crc\" DevicePath \"\"" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.160605 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7rn7\" (UniqueName: \"kubernetes.io/projected/8cf6f188-759d-4771-8264-3ca240504686-kube-api-access-w7rn7\") pod \"mariadb-client\" (UID: \"8cf6f188-759d-4771-8264-3ca240504686\") " pod="openstack/mariadb-client" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.262464 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7rn7\" (UniqueName: \"kubernetes.io/projected/8cf6f188-759d-4771-8264-3ca240504686-kube-api-access-w7rn7\") pod \"mariadb-client\" (UID: \"8cf6f188-759d-4771-8264-3ca240504686\") " pod="openstack/mariadb-client" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.282384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7rn7\" (UniqueName: \"kubernetes.io/projected/8cf6f188-759d-4771-8264-3ca240504686-kube-api-access-w7rn7\") pod \"mariadb-client\" (UID: \"8cf6f188-759d-4771-8264-3ca240504686\") " pod="openstack/mariadb-client" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.319084 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.529245 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:36 crc kubenswrapper[5002]: W1203 18:01:36.539162 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cf6f188_759d_4771_8264_3ca240504686.slice/crio-6259a43c177c3eaead057fd77a3daa84b0453a06b9ee4d71875b59ff7390ae62 WatchSource:0}: Error finding container 6259a43c177c3eaead057fd77a3daa84b0453a06b9ee4d71875b59ff7390ae62: Status 404 returned error can't find the container with id 6259a43c177c3eaead057fd77a3daa84b0453a06b9ee4d71875b59ff7390ae62 Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.539733 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1986514a9076eaf40821077952cc179bb27fc60f97e3101afa267c28ef8dea6" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.539800 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.557903 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="dee7f62c-0cf0-403f-9df5-60d4dce314da" podUID="8cf6f188-759d-4771-8264-3ca240504686" Dec 03 18:01:36 crc kubenswrapper[5002]: I1203 18:01:36.855705 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dee7f62c-0cf0-403f-9df5-60d4dce314da" path="/var/lib/kubelet/pods/dee7f62c-0cf0-403f-9df5-60d4dce314da/volumes" Dec 03 18:01:37 crc kubenswrapper[5002]: I1203 18:01:37.555491 5002 generic.go:334] "Generic (PLEG): container finished" podID="8cf6f188-759d-4771-8264-3ca240504686" containerID="674ea34615849135115aabcb26e347be3e3e17a5b5a2aeb71f87d71cac8846ff" exitCode=0 Dec 03 18:01:37 crc kubenswrapper[5002]: I1203 18:01:37.555558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"8cf6f188-759d-4771-8264-3ca240504686","Type":"ContainerDied","Data":"674ea34615849135115aabcb26e347be3e3e17a5b5a2aeb71f87d71cac8846ff"} Dec 03 18:01:37 crc kubenswrapper[5002]: I1203 18:01:37.555606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"8cf6f188-759d-4771-8264-3ca240504686","Type":"ContainerStarted","Data":"6259a43c177c3eaead057fd77a3daa84b0453a06b9ee4d71875b59ff7390ae62"} Dec 03 18:01:38 crc kubenswrapper[5002]: I1203 18:01:38.876367 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:38 crc kubenswrapper[5002]: I1203 18:01:38.895807 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_8cf6f188-759d-4771-8264-3ca240504686/mariadb-client/0.log" Dec 03 18:01:38 crc kubenswrapper[5002]: I1203 18:01:38.921383 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:38 crc kubenswrapper[5002]: I1203 18:01:38.926999 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Dec 03 18:01:39 crc kubenswrapper[5002]: I1203 18:01:39.007072 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7rn7\" (UniqueName: \"kubernetes.io/projected/8cf6f188-759d-4771-8264-3ca240504686-kube-api-access-w7rn7\") pod \"8cf6f188-759d-4771-8264-3ca240504686\" (UID: \"8cf6f188-759d-4771-8264-3ca240504686\") " Dec 03 18:01:39 crc kubenswrapper[5002]: I1203 18:01:39.014069 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf6f188-759d-4771-8264-3ca240504686-kube-api-access-w7rn7" (OuterVolumeSpecName: "kube-api-access-w7rn7") pod "8cf6f188-759d-4771-8264-3ca240504686" (UID: "8cf6f188-759d-4771-8264-3ca240504686"). InnerVolumeSpecName "kube-api-access-w7rn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:01:39 crc kubenswrapper[5002]: I1203 18:01:39.109413 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7rn7\" (UniqueName: \"kubernetes.io/projected/8cf6f188-759d-4771-8264-3ca240504686-kube-api-access-w7rn7\") on node \"crc\" DevicePath \"\"" Dec 03 18:01:39 crc kubenswrapper[5002]: I1203 18:01:39.578954 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6259a43c177c3eaead057fd77a3daa84b0453a06b9ee4d71875b59ff7390ae62" Dec 03 18:01:39 crc kubenswrapper[5002]: I1203 18:01:39.579131 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 03 18:01:40 crc kubenswrapper[5002]: I1203 18:01:40.839939 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:01:40 crc kubenswrapper[5002]: E1203 18:01:40.840316 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:01:40 crc kubenswrapper[5002]: I1203 18:01:40.851581 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cf6f188-759d-4771-8264-3ca240504686" path="/var/lib/kubelet/pods/8cf6f188-759d-4771-8264-3ca240504686/volumes" Dec 03 18:01:54 crc kubenswrapper[5002]: I1203 18:01:54.840946 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:01:54 crc kubenswrapper[5002]: E1203 18:01:54.841839 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:02:08 crc kubenswrapper[5002]: I1203 18:02:08.840200 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:02:08 crc kubenswrapper[5002]: E1203 18:02:08.841142 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.828922 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 18:02:15 crc kubenswrapper[5002]: E1203 18:02:15.829900 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf6f188-759d-4771-8264-3ca240504686" containerName="mariadb-client" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.829919 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf6f188-759d-4771-8264-3ca240504686" containerName="mariadb-client" Dec 03 
18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.830117 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf6f188-759d-4771-8264-3ca240504686" containerName="mariadb-client" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.831167 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.835005 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.836162 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-mb6j4" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.836522 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.836689 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.851631 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.856481 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.858004 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.866492 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.868665 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.906711 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.915487 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.928194 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951438 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951499 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-542a482d-511b-4095-90a0-09383616114f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-542a482d-511b-4095-90a0-09383616114f\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951560 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73ec4b95-58eb-46f5-95af-b46497c25bf6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951601 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a94b07d-4cab-49af-bc42-c8dff252abc6-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951615 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73ec4b95-58eb-46f5-95af-b46497c25bf6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951657 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.951730 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x9hf\" (UniqueName: \"kubernetes.io/projected/fa833e23-a37d-488c-9f37-24d66c288ab9-kube-api-access-8x9hf\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952255 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwcjn\" (UniqueName: \"kubernetes.io/projected/73ec4b95-58eb-46f5-95af-b46497c25bf6-kube-api-access-zwcjn\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952446 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952565 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952781 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a94b07d-4cab-49af-bc42-c8dff252abc6-config\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952832 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfblr\" (UniqueName: \"kubernetes.io/projected/2a94b07d-4cab-49af-bc42-c8dff252abc6-kube-api-access-rfblr\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952871 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952919 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.952946 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/2a94b07d-4cab-49af-bc42-c8dff252abc6-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953007 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa833e23-a37d-488c-9f37-24d66c288ab9-config\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953083 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953111 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa833e23-a37d-488c-9f37-24d66c288ab9-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953168 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953214 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73ec4b95-58eb-46f5-95af-b46497c25bf6-config\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953244 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fa833e23-a37d-488c-9f37-24d66c288ab9-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:15 crc kubenswrapper[5002]: I1203 18:02:15.953272 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.054872 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a94b07d-4cab-49af-bc42-c8dff252abc6-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73ec4b95-58eb-46f5-95af-b46497c25bf6-scripts\") pod \"ovsdbserver-nb-0\" (UID: 
\"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055178 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055251 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055333 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x9hf\" (UniqueName: \"kubernetes.io/projected/fa833e23-a37d-488c-9f37-24d66c288ab9-kube-api-access-8x9hf\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055407 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwcjn\" (UniqueName: \"kubernetes.io/projected/73ec4b95-58eb-46f5-95af-b46497c25bf6-kube-api-access-zwcjn\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055500 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055574 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a94b07d-4cab-49af-bc42-c8dff252abc6-config\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055738 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfblr\" (UniqueName: \"kubernetes.io/projected/2a94b07d-4cab-49af-bc42-c8dff252abc6-kube-api-access-rfblr\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055851 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055931 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.055998 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a94b07d-4cab-49af-bc42-c8dff252abc6-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056067 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa833e23-a37d-488c-9f37-24d66c288ab9-config\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056154 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056226 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa833e23-a37d-488c-9f37-24d66c288ab9-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056273 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73ec4b95-58eb-46f5-95af-b46497c25bf6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056367 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056439 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73ec4b95-58eb-46f5-95af-b46497c25bf6-config\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056510 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fa833e23-a37d-488c-9f37-24d66c288ab9-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056580 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " 
pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056658 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056732 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056846 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-542a482d-511b-4095-90a0-09383616114f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-542a482d-511b-4095-90a0-09383616114f\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056933 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73ec4b95-58eb-46f5-95af-b46497c25bf6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056168 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a94b07d-4cab-49af-bc42-c8dff252abc6-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.057539 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73ec4b95-58eb-46f5-95af-b46497c25bf6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.057903 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a94b07d-4cab-49af-bc42-c8dff252abc6-config\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.058060 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa833e23-a37d-488c-9f37-24d66c288ab9-config\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.058142 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73ec4b95-58eb-46f5-95af-b46497c25bf6-config\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.058168 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fa833e23-a37d-488c-9f37-24d66c288ab9-ovsdb-rundir\") 
pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.058356 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa833e23-a37d-488c-9f37-24d66c288ab9-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.056615 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a94b07d-4cab-49af-bc42-c8dff252abc6-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.061948 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.061997 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-542a482d-511b-4095-90a0-09383616114f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-542a482d-511b-4095-90a0-09383616114f\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/805052adc45c1d53cc086b4f5b753347e976a43cd93d04ffcb3f07ba155dc684/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.062322 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.062450 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
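Every kubenswrapper entry in this log carries the standard klog header: a severity letter (I/W/E/F), an mmdd timestamp with microseconds, the logging process id, the source file:line, and the message after "]". A short Go sketch of pulling one record apart — the package and field names here are our own, and a caller would first split the journal stream into individual records, since this capture wraps several per line:

package klogsketch

import (
	"fmt"
	"regexp"
)

// Entry holds the parsed pieces of one kubelet/klog record.
type Entry struct {
	Severity string // "I", "W", "E" or "F"
	Stamp    string // "1203 18:02:16.062450" (mmdd + wall clock, no year)
	PID      string // logging process id, e.g. "5002"
	Source   string // e.g. "csi_attacher.go:380"
	Message  string // everything after "] "
}

var klogHeader = regexp.MustCompile(
	`([IWEF])(\d{4} \d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+)\s+([^ ]+:\d+)\] (.*)`)

// Parse extracts the klog header from one journal record. The journald
// prefix ("Dec 03 18:02:16 crc kubenswrapper[5002]: ") is skipped
// implicitly because the pattern is unanchored.
func Parse(record string) (Entry, error) {
	m := klogHeader.FindStringSubmatch(record)
	if m == nil {
		return Entry{}, fmt.Errorf("not a klog record: %q", record)
	}
	return Entry{Severity: m[1], Stamp: m[2], PID: m[3], Source: m[4], Message: m[5]}, nil
}

Run against the csi_attacher.go:380 entry above, Parse yields Severity "I", PID "5002", Source "csi_attacher.go:380", and the "kubernetes.io/csi: attacher.MountDevice ..." text as Message.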
Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.062540 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a3ba4c55ff9edd3e5fa9bc252e2dc710bb7880503d11cb3ce265457dddddb6d0/globalmount\"" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.062455 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7be64d9d019529db191ce263f19befa58bf506da4e3f5cf2c5ba671ecb9c727/globalmount\"" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.062553 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.063126 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.063147 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.063980 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.066493 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ec4b95-58eb-46f5-95af-b46497c25bf6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.067274 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.069398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.075569 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa833e23-a37d-488c-9f37-24d66c288ab9-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.075870 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a94b07d-4cab-49af-bc42-c8dff252abc6-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.078933 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x9hf\" (UniqueName: \"kubernetes.io/projected/fa833e23-a37d-488c-9f37-24d66c288ab9-kube-api-access-8x9hf\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.081584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwcjn\" (UniqueName: \"kubernetes.io/projected/73ec4b95-58eb-46f5-95af-b46497c25bf6-kube-api-access-zwcjn\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.082574 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfblr\" (UniqueName: \"kubernetes.io/projected/2a94b07d-4cab-49af-bc42-c8dff252abc6-kube-api-access-rfblr\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.094946 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-542a482d-511b-4095-90a0-09383616114f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-542a482d-511b-4095-90a0-09383616114f\") pod \"ovsdbserver-nb-0\" (UID: \"73ec4b95-58eb-46f5-95af-b46497c25bf6\") " pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.095070 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5941e25f-61d8-452f-a0f9-12ecbda9c076\") pod \"ovsdbserver-nb-1\" (UID: \"2a94b07d-4cab-49af-bc42-c8dff252abc6\") " pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.098895 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3e22d43-dec5-4604-832e-9b7eeaaacec8\") pod \"ovsdbserver-nb-2\" (UID: \"fa833e23-a37d-488c-9f37-24d66c288ab9\") " pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.152855 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.179423 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.189356 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.665895 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.756258 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 03 18:02:16 crc kubenswrapper[5002]: W1203 18:02:16.760445 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a94b07d_4cab_49af_bc42_c8dff252abc6.slice/crio-9a9b1e0e15bdc294933ced8e5c42ab202b58cffb7377f933c9e1d9c825ca5849 WatchSource:0}: Error finding container 9a9b1e0e15bdc294933ced8e5c42ab202b58cffb7377f933c9e1d9c825ca5849: Status 404 returned error can't find the container with id 9a9b1e0e15bdc294933ced8e5c42ab202b58cffb7377f933c9e1d9c825ca5849 Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.915542 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73ec4b95-58eb-46f5-95af-b46497c25bf6","Type":"ContainerStarted","Data":"9c9cf5f3926885c5de2e95f0069533526df04a861b1853fd7dacdf7bf611013d"} Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.917035 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"2a94b07d-4cab-49af-bc42-c8dff252abc6","Type":"ContainerStarted","Data":"9a9b1e0e15bdc294933ced8e5c42ab202b58cffb7377f933c9e1d9c825ca5849"} Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.936507 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.940675 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.944402 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.944683 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-dks49" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.944885 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.945231 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.948566 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.961357 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.962879 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.970116 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.971824 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:16 crc kubenswrapper[5002]: I1203 18:02:16.989338 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.000174 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076075 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r878p\" (UniqueName: \"kubernetes.io/projected/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-kube-api-access-r878p\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076342 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076424 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076503 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076592 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-config\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076677 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d21b1d77-1894-4df5-954c-35c9eb4e7780-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076859 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076933 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.076997 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d21b1d77-1894-4df5-954c-35c9eb4e7780-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077030 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077091 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077248 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpvkd\" (UniqueName: \"kubernetes.io/projected/d21b1d77-1894-4df5-954c-35c9eb4e7780-kube-api-access-rpvkd\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077356 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077430 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d21b1d77-1894-4df5-954c-35c9eb4e7780-config\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077474 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.077498 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.179492 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58360bc1-f032-4bdd-b8c5-e2250f44e965-config\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.179895 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.179967 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182124 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182213 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d21b1d77-1894-4df5-954c-35c9eb4e7780-config\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182256 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182406 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r878p\" (UniqueName: \"kubernetes.io/projected/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-kube-api-access-r878p\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182449 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182484 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182512 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182538 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-config\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182579 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d21b1d77-1894-4df5-954c-35c9eb4e7780-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182638 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182656 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182687 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d21b1d77-1894-4df5-954c-35c9eb4e7780-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182771 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182801 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2qnf\" (UniqueName: \"kubernetes.io/projected/58360bc1-f032-4bdd-b8c5-e2250f44e965-kube-api-access-r2qnf\") pod \"ovsdbserver-sb-1\" (UID: 
\"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182843 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58360bc1-f032-4bdd-b8c5-e2250f44e965-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182880 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182938 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpvkd\" (UniqueName: \"kubernetes.io/projected/d21b1d77-1894-4df5-954c-35c9eb4e7780-kube-api-access-rpvkd\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.182960 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58360bc1-f032-4bdd-b8c5-e2250f44e965-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.183220 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d21b1d77-1894-4df5-954c-35c9eb4e7780-config\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.183646 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.184010 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d21b1d77-1894-4df5-954c-35c9eb4e7780-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.185546 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.185731 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-config\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.187702 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.187736 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/06fb0766fd29aef06f4124b70610e3645a4de4b4feaa36b0d2f5795bfdd45478/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.187947 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d21b1d77-1894-4df5-954c-35c9eb4e7780-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.188003 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.188028 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/667d16d1b3b93ddf2608f1878ec8119224e03b0ade31227a7cdf3765b373d92d/globalmount\"" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.188675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.194124 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.195022 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.195161 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.202424 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 
crc kubenswrapper[5002]: I1203 18:02:17.208787 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d21b1d77-1894-4df5-954c-35c9eb4e7780-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.214380 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpvkd\" (UniqueName: \"kubernetes.io/projected/d21b1d77-1894-4df5-954c-35c9eb4e7780-kube-api-access-rpvkd\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.215874 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r878p\" (UniqueName: \"kubernetes.io/projected/f99a14c5-cbbd-4f08-aff8-3c8e921a0850-kube-api-access-r878p\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.236140 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2deea3ab-832b-4def-9ef6-a4441a8f7eb0\") pod \"ovsdbserver-sb-2\" (UID: \"d21b1d77-1894-4df5-954c-35c9eb4e7780\") " pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.250607 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f46e38b3-c45a-4b8f-a0d5-35ad4f05b8a9\") pod \"ovsdbserver-sb-0\" (UID: \"f99a14c5-cbbd-4f08-aff8-3c8e921a0850\") " pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.262544 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286639 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58360bc1-f032-4bdd-b8c5-e2250f44e965-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286717 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286772 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58360bc1-f032-4bdd-b8c5-e2250f44e965-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286800 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58360bc1-f032-4bdd-b8c5-e2250f44e965-config\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286818 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286847 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286933 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.286963 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2qnf\" (UniqueName: \"kubernetes.io/projected/58360bc1-f032-4bdd-b8c5-e2250f44e965-kube-api-access-r2qnf\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.287262 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58360bc1-f032-4bdd-b8c5-e2250f44e965-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.287707 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/58360bc1-f032-4bdd-b8c5-e2250f44e965-config\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.288340 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58360bc1-f032-4bdd-b8c5-e2250f44e965-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.288860 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.296736 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.296926 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.297361 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58360bc1-f032-4bdd-b8c5-e2250f44e965-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.297803 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.297869 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36d7dd8b41e1272d8acc8fca69e5198dc540531bc4f84b232a88fd14e0acac4f/globalmount\"" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.315365 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2qnf\" (UniqueName: \"kubernetes.io/projected/58360bc1-f032-4bdd-b8c5-e2250f44e965-kube-api-access-r2qnf\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.336249 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d390a03c-fd7d-4ac1-979c-f63676b3c27b\") pod \"ovsdbserver-sb-1\" (UID: \"58360bc1-f032-4bdd-b8c5-e2250f44e965\") " pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.361497 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.599742 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.829537 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 18:02:17 crc kubenswrapper[5002]: W1203 18:02:17.839255 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf99a14c5_cbbd_4f08_aff8_3c8e921a0850.slice/crio-c7463e1e1bf1e35e158ceb2857a420b1b1eab218c6aca04d4c5a712299e24190 WatchSource:0}: Error finding container c7463e1e1bf1e35e158ceb2857a420b1b1eab218c6aca04d4c5a712299e24190: Status 404 returned error can't find the container with id c7463e1e1bf1e35e158ceb2857a420b1b1eab218c6aca04d4c5a712299e24190 Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.912677 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.930346 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f99a14c5-cbbd-4f08-aff8-3c8e921a0850","Type":"ContainerStarted","Data":"c7463e1e1bf1e35e158ceb2857a420b1b1eab218c6aca04d4c5a712299e24190"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.932165 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"2a94b07d-4cab-49af-bc42-c8dff252abc6","Type":"ContainerStarted","Data":"417e0c26e63b49121ee224a8e1af1979a53618f519b1dbc74896ad9e3e6d3187"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.932202 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"2a94b07d-4cab-49af-bc42-c8dff252abc6","Type":"ContainerStarted","Data":"4928ac5fd9d394fae26c3435edcde104a3de422f2fa6b60b9787b29eeadf0a5d"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.933901 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-nb-0" event={"ID":"73ec4b95-58eb-46f5-95af-b46497c25bf6","Type":"ContainerStarted","Data":"43c6abceccfc743fde414217257c742e4adf1ca5cbfb1e7466df62c873470f6a"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.933948 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73ec4b95-58eb-46f5-95af-b46497c25bf6","Type":"ContainerStarted","Data":"0db1c03ecbca05764416263b2822f1b6a008684bceac22e50300a0f3bba56306"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.934949 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d21b1d77-1894-4df5-954c-35c9eb4e7780","Type":"ContainerStarted","Data":"b39c3454b544539f37db7d29a7172731a3c43eb3ff6de5d52bf1512d3831bfbb"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.936712 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"fa833e23-a37d-488c-9f37-24d66c288ab9","Type":"ContainerStarted","Data":"3b8a5b86c4ba614a14b2aecde7a80bc7ea43352a30d0063f1567fa51a14e9921"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.936778 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"fa833e23-a37d-488c-9f37-24d66c288ab9","Type":"ContainerStarted","Data":"4d1e7dff8881dc55caa30fdfe3e76830e36244d9b1c4729f53a03b53af6f6d22"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.936792 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"fa833e23-a37d-488c-9f37-24d66c288ab9","Type":"ContainerStarted","Data":"55140f9bb7ae06d9b635bd94d3f5216d4d06dc2902cc152358315cabf6103d88"} Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.984891 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.984865692 podStartE2EDuration="3.984865692s" podCreationTimestamp="2025-12-03 18:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:17.983388602 +0000 UTC m=+5461.397210490" watchObservedRunningTime="2025-12-03 18:02:17.984865692 +0000 UTC m=+5461.398687580" Dec 03 18:02:17 crc kubenswrapper[5002]: I1203 18:02:17.985891 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.98588109 podStartE2EDuration="3.98588109s" podCreationTimestamp="2025-12-03 18:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:17.954459155 +0000 UTC m=+5461.368281073" watchObservedRunningTime="2025-12-03 18:02:17.98588109 +0000 UTC m=+5461.399702978" Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.126043 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 03 18:02:18 crc kubenswrapper[5002]: W1203 18:02:18.134252 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58360bc1_f032_4bdd_b8c5_e2250f44e965.slice/crio-659d86ac72385210beed94bea7feb1ccece2d922fc2a1dbb95790002d2f2c8e5 WatchSource:0}: Error finding container 659d86ac72385210beed94bea7feb1ccece2d922fc2a1dbb95790002d2f2c8e5: Status 404 returned error can't find the container with id 659d86ac72385210beed94bea7feb1ccece2d922fc2a1dbb95790002d2f2c8e5 Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 
18:02:18.953416 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d21b1d77-1894-4df5-954c-35c9eb4e7780","Type":"ContainerStarted","Data":"82f57f1074875097629a8d3595b971113b80a3c4d364f71fbd60ba062b19e799"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.953472 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d21b1d77-1894-4df5-954c-35c9eb4e7780","Type":"ContainerStarted","Data":"4d91d195f061007db1cc54916a04e79d8e9e0a158801ce80ef0251179ac71bc9"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.961223 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"58360bc1-f032-4bdd-b8c5-e2250f44e965","Type":"ContainerStarted","Data":"db7dc9cdfbce03cbc1a34cb16eda364e3257d515d239115a6ef4f5a876db162a"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.961276 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"58360bc1-f032-4bdd-b8c5-e2250f44e965","Type":"ContainerStarted","Data":"1e65f8f7c596ee14fd477e539aaaa23dcb27c69e70bf45a37bf83e9de8ccaa0c"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.961317 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"58360bc1-f032-4bdd-b8c5-e2250f44e965","Type":"ContainerStarted","Data":"659d86ac72385210beed94bea7feb1ccece2d922fc2a1dbb95790002d2f2c8e5"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.964292 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f99a14c5-cbbd-4f08-aff8-3c8e921a0850","Type":"ContainerStarted","Data":"77a76d3a67361ef64bb5bf55740e66835aa02b80fb77fe6684dbe9dea476af64"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.964372 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f99a14c5-cbbd-4f08-aff8-3c8e921a0850","Type":"ContainerStarted","Data":"a376246ba1b1fc2ca819de7fe0f6f1fed1add6473e4f12c53a0db22246c8e67a"} Dec 03 18:02:18 crc kubenswrapper[5002]: I1203 18:02:18.975392 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.975352102 podStartE2EDuration="3.975352102s" podCreationTimestamp="2025-12-03 18:02:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:18.973988466 +0000 UTC m=+5462.387810364" watchObservedRunningTime="2025-12-03 18:02:18.975352102 +0000 UTC m=+5462.389173990" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.002680 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.002659726 podStartE2EDuration="4.002659726s" podCreationTimestamp="2025-12-03 18:02:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:18.99238506 +0000 UTC m=+5462.406206968" watchObservedRunningTime="2025-12-03 18:02:19.002659726 +0000 UTC m=+5462.416481614" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.016450 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.016427477 podStartE2EDuration="4.016427477s" podCreationTimestamp="2025-12-03 18:02:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:19.009787128 +0000 UTC m=+5462.423609026" watchObservedRunningTime="2025-12-03 18:02:19.016427477 +0000 UTC m=+5462.430249365" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.033157 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=5.033135337 podStartE2EDuration="5.033135337s" podCreationTimestamp="2025-12-03 18:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:19.028766149 +0000 UTC m=+5462.442588037" watchObservedRunningTime="2025-12-03 18:02:19.033135337 +0000 UTC m=+5462.446957225" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.153255 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.179577 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.189822 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:19 crc kubenswrapper[5002]: I1203 18:02:19.840231 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:02:19 crc kubenswrapper[5002]: E1203 18:02:19.840537 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:02:20 crc kubenswrapper[5002]: I1203 18:02:20.263091 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:20 crc kubenswrapper[5002]: I1203 18:02:20.289862 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:20 crc kubenswrapper[5002]: I1203 18:02:20.600155 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:21 crc kubenswrapper[5002]: I1203 18:02:21.154024 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:21 crc kubenswrapper[5002]: I1203 18:02:21.180049 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:21 crc kubenswrapper[5002]: I1203 18:02:21.189709 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.219978 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.245167 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.254102 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.263108 5002 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.290107 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.290850 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.304700 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.312986 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.542246 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fd5f64bc7-q97f9"] Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.543991 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.545991 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.563286 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fd5f64bc7-q97f9"] Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.586770 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-dns-svc\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.586867 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4m6z\" (UniqueName: \"kubernetes.io/projected/320047da-b851-4ae0-88f4-9888136bb0b1-kube-api-access-h4m6z\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.586967 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-config\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.587008 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.601344 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.689539 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-dns-svc\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " 
pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.689617 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4m6z\" (UniqueName: \"kubernetes.io/projected/320047da-b851-4ae0-88f4-9888136bb0b1-kube-api-access-h4m6z\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.689694 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-config\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.689714 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.690736 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.690957 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-dns-svc\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.691618 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-config\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.711535 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4m6z\" (UniqueName: \"kubernetes.io/projected/320047da-b851-4ae0-88f4-9888136bb0b1-kube-api-access-h4m6z\") pod \"dnsmasq-dns-6fd5f64bc7-q97f9\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:22 crc kubenswrapper[5002]: I1203 18:02:22.870184 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.300718 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fd5f64bc7-q97f9"] Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.313105 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.345044 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.365412 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.408799 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.647581 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.672691 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fd5f64bc7-q97f9"] Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.701218 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59745b6b7-sgs9m"] Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.703134 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.705322 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.715704 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59745b6b7-sgs9m"] Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.812786 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-dns-svc\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.812857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-sb\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.812991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-nb\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.813137 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnhlz\" (UniqueName: \"kubernetes.io/projected/c330ffba-d5bd-4165-bbd8-d39285920be0-kube-api-access-lnhlz\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 
18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.813197 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-config\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.914841 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnhlz\" (UniqueName: \"kubernetes.io/projected/c330ffba-d5bd-4165-bbd8-d39285920be0-kube-api-access-lnhlz\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.914941 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-config\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.915073 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-dns-svc\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.915149 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-sb\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.916144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-config\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.916482 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-sb\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.917039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-dns-svc\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.917263 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-nb\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.918234 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-nb\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:23 crc kubenswrapper[5002]: I1203 18:02:23.938019 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnhlz\" (UniqueName: \"kubernetes.io/projected/c330ffba-d5bd-4165-bbd8-d39285920be0-kube-api-access-lnhlz\") pod \"dnsmasq-dns-59745b6b7-sgs9m\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:24 crc kubenswrapper[5002]: I1203 18:02:24.013596 5002 generic.go:334] "Generic (PLEG): container finished" podID="320047da-b851-4ae0-88f4-9888136bb0b1" containerID="9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac" exitCode=0 Dec 03 18:02:24 crc kubenswrapper[5002]: I1203 18:02:24.013702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" event={"ID":"320047da-b851-4ae0-88f4-9888136bb0b1","Type":"ContainerDied","Data":"9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac"} Dec 03 18:02:24 crc kubenswrapper[5002]: I1203 18:02:24.013762 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" event={"ID":"320047da-b851-4ae0-88f4-9888136bb0b1","Type":"ContainerStarted","Data":"52e93ae74164c94db50f7b3ae55560b404dbf571c635e7344be03a2dde964ba8"} Dec 03 18:02:24 crc kubenswrapper[5002]: I1203 18:02:24.069186 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Dec 03 18:02:24 crc kubenswrapper[5002]: I1203 18:02:24.076391 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:24 crc kubenswrapper[5002]: I1203 18:02:24.365078 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59745b6b7-sgs9m"] Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.024975 5002 generic.go:334] "Generic (PLEG): container finished" podID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerID="83a72ee16860cd7b55bac1f8a770a883b63abdbef6e3f35d67f37aaf1862beab" exitCode=0 Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.025054 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" event={"ID":"c330ffba-d5bd-4165-bbd8-d39285920be0","Type":"ContainerDied","Data":"83a72ee16860cd7b55bac1f8a770a883b63abdbef6e3f35d67f37aaf1862beab"} Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.025342 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" event={"ID":"c330ffba-d5bd-4165-bbd8-d39285920be0","Type":"ContainerStarted","Data":"940227b974aa5e1b7b0218aaa6592f5e369bae12053fd1805eab3d29787c9313"} Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.027820 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" podUID="320047da-b851-4ae0-88f4-9888136bb0b1" containerName="dnsmasq-dns" containerID="cri-o://cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60" gracePeriod=10 Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.027933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" event={"ID":"320047da-b851-4ae0-88f4-9888136bb0b1","Type":"ContainerStarted","Data":"cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60"} Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.028082 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.214163 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" podStartSLOduration=3.214139211 podStartE2EDuration="3.214139211s" podCreationTimestamp="2025-12-03 18:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:25.153243743 +0000 UTC m=+5468.567065631" watchObservedRunningTime="2025-12-03 18:02:25.214139211 +0000 UTC m=+5468.627961099" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.507489 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.574709 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4m6z\" (UniqueName: \"kubernetes.io/projected/320047da-b851-4ae0-88f4-9888136bb0b1-kube-api-access-h4m6z\") pod \"320047da-b851-4ae0-88f4-9888136bb0b1\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.574832 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-config\") pod \"320047da-b851-4ae0-88f4-9888136bb0b1\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.574917 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-ovsdbserver-nb\") pod \"320047da-b851-4ae0-88f4-9888136bb0b1\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.574973 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-dns-svc\") pod \"320047da-b851-4ae0-88f4-9888136bb0b1\" (UID: \"320047da-b851-4ae0-88f4-9888136bb0b1\") " Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.579628 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/320047da-b851-4ae0-88f4-9888136bb0b1-kube-api-access-h4m6z" (OuterVolumeSpecName: "kube-api-access-h4m6z") pod "320047da-b851-4ae0-88f4-9888136bb0b1" (UID: "320047da-b851-4ae0-88f4-9888136bb0b1"). InnerVolumeSpecName "kube-api-access-h4m6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.613389 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "320047da-b851-4ae0-88f4-9888136bb0b1" (UID: "320047da-b851-4ae0-88f4-9888136bb0b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.615685 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-config" (OuterVolumeSpecName: "config") pod "320047da-b851-4ae0-88f4-9888136bb0b1" (UID: "320047da-b851-4ae0-88f4-9888136bb0b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.617735 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "320047da-b851-4ae0-88f4-9888136bb0b1" (UID: "320047da-b851-4ae0-88f4-9888136bb0b1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.677787 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.677980 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4m6z\" (UniqueName: \"kubernetes.io/projected/320047da-b851-4ae0-88f4-9888136bb0b1-kube-api-access-h4m6z\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.678069 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:25 crc kubenswrapper[5002]: I1203 18:02:25.678125 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/320047da-b851-4ae0-88f4-9888136bb0b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.038918 5002 generic.go:334] "Generic (PLEG): container finished" podID="320047da-b851-4ae0-88f4-9888136bb0b1" containerID="cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60" exitCode=0 Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.038974 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.039007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" event={"ID":"320047da-b851-4ae0-88f4-9888136bb0b1","Type":"ContainerDied","Data":"cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60"} Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.039595 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd5f64bc7-q97f9" event={"ID":"320047da-b851-4ae0-88f4-9888136bb0b1","Type":"ContainerDied","Data":"52e93ae74164c94db50f7b3ae55560b404dbf571c635e7344be03a2dde964ba8"} Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.039663 5002 scope.go:117] "RemoveContainer" containerID="cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.041895 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" event={"ID":"c330ffba-d5bd-4165-bbd8-d39285920be0","Type":"ContainerStarted","Data":"19093cbffd37b47fb097bf5af5c5fd133eef1f8378bacd19f97a5bf125abaf1e"} Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.042271 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.067896 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" podStartSLOduration=3.067873952 podStartE2EDuration="3.067873952s" podCreationTimestamp="2025-12-03 18:02:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:26.064156973 +0000 UTC m=+5469.477978861" watchObservedRunningTime="2025-12-03 18:02:26.067873952 +0000 UTC m=+5469.481695830" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.069102 5002 scope.go:117] "RemoveContainer" 
containerID="9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.081990 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fd5f64bc7-q97f9"] Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.088003 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fd5f64bc7-q97f9"] Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.105411 5002 scope.go:117] "RemoveContainer" containerID="cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60" Dec 03 18:02:26 crc kubenswrapper[5002]: E1203 18:02:26.105895 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60\": container with ID starting with cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60 not found: ID does not exist" containerID="cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.105946 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60"} err="failed to get container status \"cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60\": rpc error: code = NotFound desc = could not find container \"cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60\": container with ID starting with cc77538d1682ecf9c25f98bac29d0710200620113437cde1981429b0face2c60 not found: ID does not exist" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.105974 5002 scope.go:117] "RemoveContainer" containerID="9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac" Dec 03 18:02:26 crc kubenswrapper[5002]: E1203 18:02:26.107025 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac\": container with ID starting with 9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac not found: ID does not exist" containerID="9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.107074 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac"} err="failed to get container status \"9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac\": rpc error: code = NotFound desc = could not find container \"9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac\": container with ID starting with 9e06c5b9605df887ccbfca9283aef4fb65953d073bb9857cf3396735a94547ac not found: ID does not exist" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.721527 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Dec 03 18:02:26 crc kubenswrapper[5002]: E1203 18:02:26.722049 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="320047da-b851-4ae0-88f4-9888136bb0b1" containerName="dnsmasq-dns" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.722069 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="320047da-b851-4ae0-88f4-9888136bb0b1" containerName="dnsmasq-dns" Dec 03 18:02:26 crc kubenswrapper[5002]: E1203 18:02:26.722088 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="320047da-b851-4ae0-88f4-9888136bb0b1" containerName="init" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.722096 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="320047da-b851-4ae0-88f4-9888136bb0b1" containerName="init" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.722271 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="320047da-b851-4ae0-88f4-9888136bb0b1" containerName="dnsmasq-dns" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.722958 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.726050 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.733826 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.797260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dlr6\" (UniqueName: \"kubernetes.io/projected/c69d8501-5a8d-428c-8df0-7af9b5a3d1ed-kube-api-access-5dlr6\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.797320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.797447 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c69d8501-5a8d-428c-8df0-7af9b5a3d1ed-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.850511 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="320047da-b851-4ae0-88f4-9888136bb0b1" path="/var/lib/kubelet/pods/320047da-b851-4ae0-88f4-9888136bb0b1/volumes" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.899168 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c69d8501-5a8d-428c-8df0-7af9b5a3d1ed-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.899299 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dlr6\" (UniqueName: \"kubernetes.io/projected/c69d8501-5a8d-428c-8df0-7af9b5a3d1ed-kube-api-access-5dlr6\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.899336 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.904026 5002 
csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.904084 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/054bee7e69bbe4c8db70a519f9534a98ec2422b8a3c108d52f860f2aaf014d6e/globalmount\"" pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.915647 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c69d8501-5a8d-428c-8df0-7af9b5a3d1ed-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.919012 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dlr6\" (UniqueName: \"kubernetes.io/projected/c69d8501-5a8d-428c-8df0-7af9b5a3d1ed-kube-api-access-5dlr6\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:26 crc kubenswrapper[5002]: I1203 18:02:26.958660 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ef62c2f-d961-45b3-99b6-3d01aaa5bae0\") pod \"ovn-copy-data\" (UID: \"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed\") " pod="openstack/ovn-copy-data" Dec 03 18:02:27 crc kubenswrapper[5002]: I1203 18:02:27.048473 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Dec 03 18:02:27 crc kubenswrapper[5002]: I1203 18:02:27.584301 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Dec 03 18:02:27 crc kubenswrapper[5002]: W1203 18:02:27.586100 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc69d8501_5a8d_428c_8df0_7af9b5a3d1ed.slice/crio-ad5368053f0079f54198820798bb56f6974808bf09ec279ad5cea412df61ec07 WatchSource:0}: Error finding container ad5368053f0079f54198820798bb56f6974808bf09ec279ad5cea412df61ec07: Status 404 returned error can't find the container with id ad5368053f0079f54198820798bb56f6974808bf09ec279ad5cea412df61ec07 Dec 03 18:02:27 crc kubenswrapper[5002]: I1203 18:02:27.589674 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 18:02:28 crc kubenswrapper[5002]: I1203 18:02:28.074084 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed","Type":"ContainerStarted","Data":"ad5368053f0079f54198820798bb56f6974808bf09ec279ad5cea412df61ec07"} Dec 03 18:02:29 crc kubenswrapper[5002]: I1203 18:02:29.085008 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"c69d8501-5a8d-428c-8df0-7af9b5a3d1ed","Type":"ContainerStarted","Data":"258d143031671135421196c6e61937ba85e7f1ba64a5bdca187c4d4104def812"} Dec 03 18:02:29 crc kubenswrapper[5002]: I1203 18:02:29.105042 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.455026351 podStartE2EDuration="4.105023329s" podCreationTimestamp="2025-12-03 18:02:25 +0000 UTC" firstStartedPulling="2025-12-03 18:02:27.589410271 +0000 UTC m=+5471.003232169" lastFinishedPulling="2025-12-03 18:02:28.239407259 +0000 UTC m=+5471.653229147" observedRunningTime="2025-12-03 18:02:29.101022653 +0000 UTC m=+5472.514844551" watchObservedRunningTime="2025-12-03 18:02:29.105023329 +0000 UTC m=+5472.518845217" Dec 03 18:02:31 crc kubenswrapper[5002]: I1203 18:02:31.840813 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:02:31 crc kubenswrapper[5002]: E1203 18:02:31.841643 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.077710 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.140317 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f79bf7859-nxvrw"] Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.140651 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerName="dnsmasq-dns" containerID="cri-o://c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b" gracePeriod=10 Dec 03 18:02:34 crc kubenswrapper[5002]: E1203 
18:02:34.232686 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a34a01e_9a6b_4b0f_bfbf_a2af8859558e.slice/crio-c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a34a01e_9a6b_4b0f_bfbf_a2af8859558e.slice/crio-conmon-c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b.scope\": RecentStats: unable to find data in memory cache]" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.590462 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.630972 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-config\") pod \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.631166 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-dns-svc\") pod \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.631294 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mrtz\" (UniqueName: \"kubernetes.io/projected/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-kube-api-access-2mrtz\") pod \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\" (UID: \"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e\") " Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.638487 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-kube-api-access-2mrtz" (OuterVolumeSpecName: "kube-api-access-2mrtz") pod "6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" (UID: "6a34a01e-9a6b-4b0f-bfbf-a2af8859558e"). InnerVolumeSpecName "kube-api-access-2mrtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.680778 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-config" (OuterVolumeSpecName: "config") pod "6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" (UID: "6a34a01e-9a6b-4b0f-bfbf-a2af8859558e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.696548 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" (UID: "6a34a01e-9a6b-4b0f-bfbf-a2af8859558e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.735154 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mrtz\" (UniqueName: \"kubernetes.io/projected/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-kube-api-access-2mrtz\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.735188 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:34 crc kubenswrapper[5002]: I1203 18:02:34.735197 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.153530 5002 generic.go:334] "Generic (PLEG): container finished" podID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerID="c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b" exitCode=0 Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.153591 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" event={"ID":"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e","Type":"ContainerDied","Data":"c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b"} Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.153626 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" event={"ID":"6a34a01e-9a6b-4b0f-bfbf-a2af8859558e","Type":"ContainerDied","Data":"b9f11ce461e1b81a25d97a6eaba0e72c05d6b491da45fae194f80a3b81074f75"} Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.153647 5002 scope.go:117] "RemoveContainer" containerID="c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.153861 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f79bf7859-nxvrw" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.177282 5002 scope.go:117] "RemoveContainer" containerID="85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.184158 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f79bf7859-nxvrw"] Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.193163 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f79bf7859-nxvrw"] Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.201941 5002 scope.go:117] "RemoveContainer" containerID="c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b" Dec 03 18:02:35 crc kubenswrapper[5002]: E1203 18:02:35.203565 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b\": container with ID starting with c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b not found: ID does not exist" containerID="c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.203625 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b"} err="failed to get container status \"c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b\": rpc error: code = NotFound desc = could not find container \"c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b\": container with ID starting with c6c5a4885728366484b50d80c5e3c31bc98582845b4018917b7bf4dc79e0411b not found: ID does not exist" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.203662 5002 scope.go:117] "RemoveContainer" containerID="85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e" Dec 03 18:02:35 crc kubenswrapper[5002]: E1203 18:02:35.204331 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e\": container with ID starting with 85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e not found: ID does not exist" containerID="85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.204457 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e"} err="failed to get container status \"85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e\": rpc error: code = NotFound desc = could not find container \"85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e\": container with ID starting with 85be67dbad28fb945d5fb4d20d77ac5e440605abb498e575c39739132c4f189e not found: ID does not exist" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.585558 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 03 18:02:35 crc kubenswrapper[5002]: E1203 18:02:35.585958 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerName="dnsmasq-dns" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.585985 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" 
containerName="dnsmasq-dns" Dec 03 18:02:35 crc kubenswrapper[5002]: E1203 18:02:35.586002 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerName="init" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.586012 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerName="init" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.586206 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" containerName="dnsmasq-dns" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.598532 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.603792 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.604497 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-r5frz" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.604497 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.608551 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.615324 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650012 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650088 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95a49eca-0a96-460d-8366-7a65e3a93c4c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650136 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650199 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95a49eca-0a96-460d-8366-7a65e3a93c4c-config\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650255 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95a49eca-0a96-460d-8366-7a65e3a93c4c-scripts\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650301 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.650325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zthzk\" (UniqueName: \"kubernetes.io/projected/95a49eca-0a96-460d-8366-7a65e3a93c4c-kube-api-access-zthzk\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.751829 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.751911 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95a49eca-0a96-460d-8366-7a65e3a93c4c-config\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.751957 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95a49eca-0a96-460d-8366-7a65e3a93c4c-scripts\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.751987 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.752006 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zthzk\" (UniqueName: \"kubernetes.io/projected/95a49eca-0a96-460d-8366-7a65e3a93c4c-kube-api-access-zthzk\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.752039 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.752065 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95a49eca-0a96-460d-8366-7a65e3a93c4c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.753149 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95a49eca-0a96-460d-8366-7a65e3a93c4c-config\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc 
kubenswrapper[5002]: I1203 18:02:35.753307 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95a49eca-0a96-460d-8366-7a65e3a93c4c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.753489 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95a49eca-0a96-460d-8366-7a65e3a93c4c-scripts\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.756449 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.756486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.764364 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/95a49eca-0a96-460d-8366-7a65e3a93c4c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.777522 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zthzk\" (UniqueName: \"kubernetes.io/projected/95a49eca-0a96-460d-8366-7a65e3a93c4c-kube-api-access-zthzk\") pod \"ovn-northd-0\" (UID: \"95a49eca-0a96-460d-8366-7a65e3a93c4c\") " pod="openstack/ovn-northd-0" Dec 03 18:02:35 crc kubenswrapper[5002]: I1203 18:02:35.922036 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 18:02:36 crc kubenswrapper[5002]: I1203 18:02:36.384960 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 18:02:36 crc kubenswrapper[5002]: W1203 18:02:36.392132 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95a49eca_0a96_460d_8366_7a65e3a93c4c.slice/crio-b32d9bd25ae7eabf983883ac5f6f014aa5909a35855f61ed2e6f111abb1b32cc WatchSource:0}: Error finding container b32d9bd25ae7eabf983883ac5f6f014aa5909a35855f61ed2e6f111abb1b32cc: Status 404 returned error can't find the container with id b32d9bd25ae7eabf983883ac5f6f014aa5909a35855f61ed2e6f111abb1b32cc Dec 03 18:02:36 crc kubenswrapper[5002]: I1203 18:02:36.854428 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a34a01e-9a6b-4b0f-bfbf-a2af8859558e" path="/var/lib/kubelet/pods/6a34a01e-9a6b-4b0f-bfbf-a2af8859558e/volumes" Dec 03 18:02:37 crc kubenswrapper[5002]: I1203 18:02:37.175107 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"95a49eca-0a96-460d-8366-7a65e3a93c4c","Type":"ContainerStarted","Data":"d720a044f13c2335995745c8fa8e719f0ff6b20c7f7b039414cb33481e0e76a8"} Dec 03 18:02:37 crc kubenswrapper[5002]: I1203 18:02:37.175158 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"95a49eca-0a96-460d-8366-7a65e3a93c4c","Type":"ContainerStarted","Data":"bbd91d788096a3b82be2a6587078c35a106fc20bdda18e74a795808dbd4aeb80"} Dec 03 18:02:37 crc kubenswrapper[5002]: I1203 18:02:37.175170 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"95a49eca-0a96-460d-8366-7a65e3a93c4c","Type":"ContainerStarted","Data":"b32d9bd25ae7eabf983883ac5f6f014aa5909a35855f61ed2e6f111abb1b32cc"} Dec 03 18:02:37 crc kubenswrapper[5002]: I1203 18:02:37.175329 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 18:02:37 crc kubenswrapper[5002]: I1203 18:02:37.208035 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.208001249 podStartE2EDuration="2.208001249s" podCreationTimestamp="2025-12-03 18:02:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:37.201489304 +0000 UTC m=+5480.615311192" watchObservedRunningTime="2025-12-03 18:02:37.208001249 +0000 UTC m=+5480.621823177" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.411641 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-fmxnp"] Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.413337 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.428652 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7f6a-account-create-update-lxrjp"] Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.430193 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.433031 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.437463 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fmxnp"] Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.449349 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7f6a-account-create-update-lxrjp"] Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.453958 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7a9fcc4-d145-47aa-bc44-f31309080419-operator-scripts\") pod \"keystone-db-create-fmxnp\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.454019 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-operator-scripts\") pod \"keystone-7f6a-account-create-update-lxrjp\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.454048 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znzm5\" (UniqueName: \"kubernetes.io/projected/a7a9fcc4-d145-47aa-bc44-f31309080419-kube-api-access-znzm5\") pod \"keystone-db-create-fmxnp\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.454080 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5xnp\" (UniqueName: \"kubernetes.io/projected/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-kube-api-access-z5xnp\") pod \"keystone-7f6a-account-create-update-lxrjp\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.554850 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7a9fcc4-d145-47aa-bc44-f31309080419-operator-scripts\") pod \"keystone-db-create-fmxnp\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.554905 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-operator-scripts\") pod \"keystone-7f6a-account-create-update-lxrjp\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.554964 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znzm5\" (UniqueName: \"kubernetes.io/projected/a7a9fcc4-d145-47aa-bc44-f31309080419-kube-api-access-znzm5\") pod \"keystone-db-create-fmxnp\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.554994 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5xnp\" (UniqueName: \"kubernetes.io/projected/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-kube-api-access-z5xnp\") pod \"keystone-7f6a-account-create-update-lxrjp\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.555953 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7a9fcc4-d145-47aa-bc44-f31309080419-operator-scripts\") pod \"keystone-db-create-fmxnp\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.555953 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-operator-scripts\") pod \"keystone-7f6a-account-create-update-lxrjp\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.577825 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znzm5\" (UniqueName: \"kubernetes.io/projected/a7a9fcc4-d145-47aa-bc44-f31309080419-kube-api-access-znzm5\") pod \"keystone-db-create-fmxnp\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.578602 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5xnp\" (UniqueName: \"kubernetes.io/projected/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-kube-api-access-z5xnp\") pod \"keystone-7f6a-account-create-update-lxrjp\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.738944 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:41 crc kubenswrapper[5002]: I1203 18:02:41.763879 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:42 crc kubenswrapper[5002]: W1203 18:02:42.236263 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcaa55ef5_b64d_4782_9f9a_9ae57c8ecd5f.slice/crio-4e776d4a23e1fa8498a9e3b7e00f3a6fa920fd1712153096f0d8d00d1651183a WatchSource:0}: Error finding container 4e776d4a23e1fa8498a9e3b7e00f3a6fa920fd1712153096f0d8d00d1651183a: Status 404 returned error can't find the container with id 4e776d4a23e1fa8498a9e3b7e00f3a6fa920fd1712153096f0d8d00d1651183a Dec 03 18:02:42 crc kubenswrapper[5002]: I1203 18:02:42.237920 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7f6a-account-create-update-lxrjp"] Dec 03 18:02:42 crc kubenswrapper[5002]: I1203 18:02:42.306539 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fmxnp"] Dec 03 18:02:42 crc kubenswrapper[5002]: W1203 18:02:42.324535 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7a9fcc4_d145_47aa_bc44_f31309080419.slice/crio-877534b90960a0ab0dfbe97a820d6a92ee9d201baebf296a59d7b2422aa8830a WatchSource:0}: Error finding container 877534b90960a0ab0dfbe97a820d6a92ee9d201baebf296a59d7b2422aa8830a: Status 404 returned error can't find the container with id 877534b90960a0ab0dfbe97a820d6a92ee9d201baebf296a59d7b2422aa8830a Dec 03 18:02:43 crc kubenswrapper[5002]: I1203 18:02:43.233886 5002 generic.go:334] "Generic (PLEG): container finished" podID="caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" containerID="0070b16044af8613d27073576ef302b5b52d3790e3375233066f86781097eddd" exitCode=0 Dec 03 18:02:43 crc kubenswrapper[5002]: I1203 18:02:43.233947 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f6a-account-create-update-lxrjp" event={"ID":"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f","Type":"ContainerDied","Data":"0070b16044af8613d27073576ef302b5b52d3790e3375233066f86781097eddd"} Dec 03 18:02:43 crc kubenswrapper[5002]: I1203 18:02:43.235430 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f6a-account-create-update-lxrjp" event={"ID":"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f","Type":"ContainerStarted","Data":"4e776d4a23e1fa8498a9e3b7e00f3a6fa920fd1712153096f0d8d00d1651183a"} Dec 03 18:02:43 crc kubenswrapper[5002]: I1203 18:02:43.237725 5002 generic.go:334] "Generic (PLEG): container finished" podID="a7a9fcc4-d145-47aa-bc44-f31309080419" containerID="873abe10299607fc9764747fd60143187437ea34d8b5e840fe8c3a414ae0bd62" exitCode=0 Dec 03 18:02:43 crc kubenswrapper[5002]: I1203 18:02:43.237790 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fmxnp" event={"ID":"a7a9fcc4-d145-47aa-bc44-f31309080419","Type":"ContainerDied","Data":"873abe10299607fc9764747fd60143187437ea34d8b5e840fe8c3a414ae0bd62"} Dec 03 18:02:43 crc kubenswrapper[5002]: I1203 18:02:43.237823 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fmxnp" event={"ID":"a7a9fcc4-d145-47aa-bc44-f31309080419","Type":"ContainerStarted","Data":"877534b90960a0ab0dfbe97a820d6a92ee9d201baebf296a59d7b2422aa8830a"} Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.671476 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.679624 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.730519 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znzm5\" (UniqueName: \"kubernetes.io/projected/a7a9fcc4-d145-47aa-bc44-f31309080419-kube-api-access-znzm5\") pod \"a7a9fcc4-d145-47aa-bc44-f31309080419\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.730694 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5xnp\" (UniqueName: \"kubernetes.io/projected/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-kube-api-access-z5xnp\") pod \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.731048 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7a9fcc4-d145-47aa-bc44-f31309080419-operator-scripts\") pod \"a7a9fcc4-d145-47aa-bc44-f31309080419\" (UID: \"a7a9fcc4-d145-47aa-bc44-f31309080419\") " Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.731081 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-operator-scripts\") pod \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\" (UID: \"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f\") " Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.731854 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7a9fcc4-d145-47aa-bc44-f31309080419-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a7a9fcc4-d145-47aa-bc44-f31309080419" (UID: "a7a9fcc4-d145-47aa-bc44-f31309080419"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.731896 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" (UID: "caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.736958 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-kube-api-access-z5xnp" (OuterVolumeSpecName: "kube-api-access-z5xnp") pod "caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" (UID: "caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f"). InnerVolumeSpecName "kube-api-access-z5xnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.737067 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a9fcc4-d145-47aa-bc44-f31309080419-kube-api-access-znzm5" (OuterVolumeSpecName: "kube-api-access-znzm5") pod "a7a9fcc4-d145-47aa-bc44-f31309080419" (UID: "a7a9fcc4-d145-47aa-bc44-f31309080419"). InnerVolumeSpecName "kube-api-access-znzm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.833170 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5xnp\" (UniqueName: \"kubernetes.io/projected/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-kube-api-access-z5xnp\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.833252 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7a9fcc4-d145-47aa-bc44-f31309080419-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.833267 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:44 crc kubenswrapper[5002]: I1203 18:02:44.833277 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znzm5\" (UniqueName: \"kubernetes.io/projected/a7a9fcc4-d145-47aa-bc44-f31309080419-kube-api-access-znzm5\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:45 crc kubenswrapper[5002]: I1203 18:02:45.253944 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fmxnp" event={"ID":"a7a9fcc4-d145-47aa-bc44-f31309080419","Type":"ContainerDied","Data":"877534b90960a0ab0dfbe97a820d6a92ee9d201baebf296a59d7b2422aa8830a"} Dec 03 18:02:45 crc kubenswrapper[5002]: I1203 18:02:45.254005 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="877534b90960a0ab0dfbe97a820d6a92ee9d201baebf296a59d7b2422aa8830a" Dec 03 18:02:45 crc kubenswrapper[5002]: I1203 18:02:45.254064 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fmxnp" Dec 03 18:02:45 crc kubenswrapper[5002]: I1203 18:02:45.256686 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f6a-account-create-update-lxrjp" event={"ID":"caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f","Type":"ContainerDied","Data":"4e776d4a23e1fa8498a9e3b7e00f3a6fa920fd1712153096f0d8d00d1651183a"} Dec 03 18:02:45 crc kubenswrapper[5002]: I1203 18:02:45.256735 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e776d4a23e1fa8498a9e3b7e00f3a6fa920fd1712153096f0d8d00d1651183a" Dec 03 18:02:45 crc kubenswrapper[5002]: I1203 18:02:45.256910 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7f6a-account-create-update-lxrjp" Dec 03 18:02:46 crc kubenswrapper[5002]: I1203 18:02:46.845434 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:02:46 crc kubenswrapper[5002]: E1203 18:02:46.846037 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.030979 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-zth4g"] Dec 03 18:02:47 crc kubenswrapper[5002]: E1203 18:02:47.032568 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" containerName="mariadb-account-create-update" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.032610 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" containerName="mariadb-account-create-update" Dec 03 18:02:47 crc kubenswrapper[5002]: E1203 18:02:47.032667 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a9fcc4-d145-47aa-bc44-f31309080419" containerName="mariadb-database-create" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.032678 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a9fcc4-d145-47aa-bc44-f31309080419" containerName="mariadb-database-create" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.033171 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" containerName="mariadb-account-create-update" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.033199 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a9fcc4-d145-47aa-bc44-f31309080419" containerName="mariadb-database-create" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.034348 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.036620 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hpchv" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.037168 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.037464 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.037655 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.050247 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-zth4g"] Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.075895 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-config-data\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.075957 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v7tz\" (UniqueName: \"kubernetes.io/projected/05c8eebd-bf20-45d1-9c0b-42480190c8c9-kube-api-access-2v7tz\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.075979 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-combined-ca-bundle\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.178093 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-config-data\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.178146 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v7tz\" (UniqueName: \"kubernetes.io/projected/05c8eebd-bf20-45d1-9c0b-42480190c8c9-kube-api-access-2v7tz\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.178168 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-combined-ca-bundle\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.184709 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-config-data\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " 
pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.186401 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-combined-ca-bundle\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.194503 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v7tz\" (UniqueName: \"kubernetes.io/projected/05c8eebd-bf20-45d1-9c0b-42480190c8c9-kube-api-access-2v7tz\") pod \"keystone-db-sync-zth4g\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.354264 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:47 crc kubenswrapper[5002]: I1203 18:02:47.805577 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-zth4g"] Dec 03 18:02:47 crc kubenswrapper[5002]: W1203 18:02:47.814035 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05c8eebd_bf20_45d1_9c0b_42480190c8c9.slice/crio-037e63bc62458fe0d203f726eeaffebb960f7f6ef68845d342d020f6f743bc9b WatchSource:0}: Error finding container 037e63bc62458fe0d203f726eeaffebb960f7f6ef68845d342d020f6f743bc9b: Status 404 returned error can't find the container with id 037e63bc62458fe0d203f726eeaffebb960f7f6ef68845d342d020f6f743bc9b Dec 03 18:02:48 crc kubenswrapper[5002]: I1203 18:02:48.282560 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zth4g" event={"ID":"05c8eebd-bf20-45d1-9c0b-42480190c8c9","Type":"ContainerStarted","Data":"037e63bc62458fe0d203f726eeaffebb960f7f6ef68845d342d020f6f743bc9b"} Dec 03 18:02:49 crc kubenswrapper[5002]: I1203 18:02:49.295127 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zth4g" event={"ID":"05c8eebd-bf20-45d1-9c0b-42480190c8c9","Type":"ContainerStarted","Data":"4fb56380c30add21cc422377ee92ae493904f70445429924d3e3dcca7d36f98f"} Dec 03 18:02:49 crc kubenswrapper[5002]: I1203 18:02:49.315120 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-zth4g" podStartSLOduration=3.315097343 podStartE2EDuration="3.315097343s" podCreationTimestamp="2025-12-03 18:02:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:49.307626441 +0000 UTC m=+5492.721448339" watchObservedRunningTime="2025-12-03 18:02:49.315097343 +0000 UTC m=+5492.728919231" Dec 03 18:02:50 crc kubenswrapper[5002]: I1203 18:02:50.305059 5002 generic.go:334] "Generic (PLEG): container finished" podID="05c8eebd-bf20-45d1-9c0b-42480190c8c9" containerID="4fb56380c30add21cc422377ee92ae493904f70445429924d3e3dcca7d36f98f" exitCode=0 Dec 03 18:02:50 crc kubenswrapper[5002]: I1203 18:02:50.305137 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zth4g" event={"ID":"05c8eebd-bf20-45d1-9c0b-42480190c8c9","Type":"ContainerDied","Data":"4fb56380c30add21cc422377ee92ae493904f70445429924d3e3dcca7d36f98f"} Dec 03 18:02:50 crc kubenswrapper[5002]: I1203 18:02:50.993021 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/ovn-northd-0" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.644920 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.760713 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2v7tz\" (UniqueName: \"kubernetes.io/projected/05c8eebd-bf20-45d1-9c0b-42480190c8c9-kube-api-access-2v7tz\") pod \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.760861 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-config-data\") pod \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.761686 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-combined-ca-bundle\") pod \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\" (UID: \"05c8eebd-bf20-45d1-9c0b-42480190c8c9\") " Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.765924 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05c8eebd-bf20-45d1-9c0b-42480190c8c9-kube-api-access-2v7tz" (OuterVolumeSpecName: "kube-api-access-2v7tz") pod "05c8eebd-bf20-45d1-9c0b-42480190c8c9" (UID: "05c8eebd-bf20-45d1-9c0b-42480190c8c9"). InnerVolumeSpecName "kube-api-access-2v7tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.785608 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05c8eebd-bf20-45d1-9c0b-42480190c8c9" (UID: "05c8eebd-bf20-45d1-9c0b-42480190c8c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.799994 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-config-data" (OuterVolumeSpecName: "config-data") pod "05c8eebd-bf20-45d1-9c0b-42480190c8c9" (UID: "05c8eebd-bf20-45d1-9c0b-42480190c8c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.864179 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2v7tz\" (UniqueName: \"kubernetes.io/projected/05c8eebd-bf20-45d1-9c0b-42480190c8c9-kube-api-access-2v7tz\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.864241 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:51 crc kubenswrapper[5002]: I1203 18:02:51.864261 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c8eebd-bf20-45d1-9c0b-42480190c8c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.321359 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zth4g" event={"ID":"05c8eebd-bf20-45d1-9c0b-42480190c8c9","Type":"ContainerDied","Data":"037e63bc62458fe0d203f726eeaffebb960f7f6ef68845d342d020f6f743bc9b"} Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.321408 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="037e63bc62458fe0d203f726eeaffebb960f7f6ef68845d342d020f6f743bc9b" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.321414 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-zth4g" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.566204 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fqlww"] Dec 03 18:02:52 crc kubenswrapper[5002]: E1203 18:02:52.566644 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c8eebd-bf20-45d1-9c0b-42480190c8c9" containerName="keystone-db-sync" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.566668 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c8eebd-bf20-45d1-9c0b-42480190c8c9" containerName="keystone-db-sync" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.566932 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c8eebd-bf20-45d1-9c0b-42480190c8c9" containerName="keystone-db-sync" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.567703 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.571166 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.571454 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.572202 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hpchv" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.584903 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fqlww"] Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.587553 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.590405 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.604431 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-644fc4fd69-lcc74"] Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.605920 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.638101 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644fc4fd69-lcc74"] Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.675684 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-dns-svc\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.675778 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkczs\" (UniqueName: \"kubernetes.io/projected/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-kube-api-access-rkczs\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.675812 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-fernet-keys\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.675835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-config-data\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.675917 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-scripts\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc 
kubenswrapper[5002]: I1203 18:02:52.675979 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-combined-ca-bundle\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.676013 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-credential-keys\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.676168 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-config\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.676255 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqqk4\" (UniqueName: \"kubernetes.io/projected/3788c701-44fb-4959-b69e-920899cd9daa-kube-api-access-hqqk4\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.676371 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-sb\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.676404 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-nb\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778372 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-scripts\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778454 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-combined-ca-bundle\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778498 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-credential-keys\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 
crc kubenswrapper[5002]: I1203 18:02:52.778540 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-config\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778577 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqqk4\" (UniqueName: \"kubernetes.io/projected/3788c701-44fb-4959-b69e-920899cd9daa-kube-api-access-hqqk4\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-sb\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778662 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-nb\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778712 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-dns-svc\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778773 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkczs\" (UniqueName: \"kubernetes.io/projected/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-kube-api-access-rkczs\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778794 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-fernet-keys\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.778815 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-config-data\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.779965 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-sb\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.780048 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-config\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.783648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-nb\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.788170 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-scripts\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.789862 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-combined-ca-bundle\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.797685 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-dns-svc\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.797802 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-credential-keys\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.798834 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-fernet-keys\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.800429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-config-data\") pod \"keystone-bootstrap-fqlww\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.800929 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkczs\" (UniqueName: \"kubernetes.io/projected/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-kube-api-access-rkczs\") pod \"dnsmasq-dns-644fc4fd69-lcc74\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.802512 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqqk4\" (UniqueName: \"kubernetes.io/projected/3788c701-44fb-4959-b69e-920899cd9daa-kube-api-access-hqqk4\") pod \"keystone-bootstrap-fqlww\" (UID: 
\"3788c701-44fb-4959-b69e-920899cd9daa\") " pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.901891 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:52 crc kubenswrapper[5002]: I1203 18:02:52.936851 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:53 crc kubenswrapper[5002]: I1203 18:02:53.386651 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fqlww"] Dec 03 18:02:53 crc kubenswrapper[5002]: I1203 18:02:53.493735 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644fc4fd69-lcc74"] Dec 03 18:02:54 crc kubenswrapper[5002]: I1203 18:02:54.337305 5002 generic.go:334] "Generic (PLEG): container finished" podID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerID="42f3bae3906a2e05de44868b8469842343d86a83baf89556161dd6b5974cdd5f" exitCode=0 Dec 03 18:02:54 crc kubenswrapper[5002]: I1203 18:02:54.337382 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" event={"ID":"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8","Type":"ContainerDied","Data":"42f3bae3906a2e05de44868b8469842343d86a83baf89556161dd6b5974cdd5f"} Dec 03 18:02:54 crc kubenswrapper[5002]: I1203 18:02:54.337671 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" event={"ID":"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8","Type":"ContainerStarted","Data":"841722a43acd5a1623736bd88ecde43e93952fc91e20ad4266fecd9e34005848"} Dec 03 18:02:54 crc kubenswrapper[5002]: I1203 18:02:54.339968 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fqlww" event={"ID":"3788c701-44fb-4959-b69e-920899cd9daa","Type":"ContainerStarted","Data":"99288426a7baf36ea4c4e9a36e81263b1e31e298d326342f42d24281e45b9114"} Dec 03 18:02:54 crc kubenswrapper[5002]: I1203 18:02:54.340019 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fqlww" event={"ID":"3788c701-44fb-4959-b69e-920899cd9daa","Type":"ContainerStarted","Data":"79b241f16d9b41414a7cdcefc99d877c43591676689e1047173fc084b9e2ae15"} Dec 03 18:02:54 crc kubenswrapper[5002]: I1203 18:02:54.392800 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-fqlww" podStartSLOduration=2.392777082 podStartE2EDuration="2.392777082s" podCreationTimestamp="2025-12-03 18:02:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:54.381067707 +0000 UTC m=+5497.794889595" watchObservedRunningTime="2025-12-03 18:02:54.392777082 +0000 UTC m=+5497.806598980" Dec 03 18:02:55 crc kubenswrapper[5002]: I1203 18:02:55.354802 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" event={"ID":"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8","Type":"ContainerStarted","Data":"ba6d177f1a840691c2d7ae44387ca755a92245b51c87da5fc9b0c1bdd59fe52f"} Dec 03 18:02:55 crc kubenswrapper[5002]: I1203 18:02:55.355259 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:02:55 crc kubenswrapper[5002]: I1203 18:02:55.377184 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" podStartSLOduration=3.377152547 
podStartE2EDuration="3.377152547s" podCreationTimestamp="2025-12-03 18:02:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:02:55.376553151 +0000 UTC m=+5498.790375099" watchObservedRunningTime="2025-12-03 18:02:55.377152547 +0000 UTC m=+5498.790974465" Dec 03 18:02:57 crc kubenswrapper[5002]: I1203 18:02:57.841119 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:02:57 crc kubenswrapper[5002]: E1203 18:02:57.841776 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:02:58 crc kubenswrapper[5002]: I1203 18:02:58.377089 5002 generic.go:334] "Generic (PLEG): container finished" podID="3788c701-44fb-4959-b69e-920899cd9daa" containerID="99288426a7baf36ea4c4e9a36e81263b1e31e298d326342f42d24281e45b9114" exitCode=0 Dec 03 18:02:58 crc kubenswrapper[5002]: I1203 18:02:58.377140 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fqlww" event={"ID":"3788c701-44fb-4959-b69e-920899cd9daa","Type":"ContainerDied","Data":"99288426a7baf36ea4c4e9a36e81263b1e31e298d326342f42d24281e45b9114"} Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.800325 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.920245 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqqk4\" (UniqueName: \"kubernetes.io/projected/3788c701-44fb-4959-b69e-920899cd9daa-kube-api-access-hqqk4\") pod \"3788c701-44fb-4959-b69e-920899cd9daa\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.920324 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-scripts\") pod \"3788c701-44fb-4959-b69e-920899cd9daa\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.920351 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-combined-ca-bundle\") pod \"3788c701-44fb-4959-b69e-920899cd9daa\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.920374 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-config-data\") pod \"3788c701-44fb-4959-b69e-920899cd9daa\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.920402 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-credential-keys\") pod \"3788c701-44fb-4959-b69e-920899cd9daa\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " Dec 03 
18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.920420 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-fernet-keys\") pod \"3788c701-44fb-4959-b69e-920899cd9daa\" (UID: \"3788c701-44fb-4959-b69e-920899cd9daa\") " Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.927538 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-scripts" (OuterVolumeSpecName: "scripts") pod "3788c701-44fb-4959-b69e-920899cd9daa" (UID: "3788c701-44fb-4959-b69e-920899cd9daa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.928845 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3788c701-44fb-4959-b69e-920899cd9daa" (UID: "3788c701-44fb-4959-b69e-920899cd9daa"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.929257 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3788c701-44fb-4959-b69e-920899cd9daa" (UID: "3788c701-44fb-4959-b69e-920899cd9daa"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.930451 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3788c701-44fb-4959-b69e-920899cd9daa-kube-api-access-hqqk4" (OuterVolumeSpecName: "kube-api-access-hqqk4") pod "3788c701-44fb-4959-b69e-920899cd9daa" (UID: "3788c701-44fb-4959-b69e-920899cd9daa"). InnerVolumeSpecName "kube-api-access-hqqk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.946161 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-config-data" (OuterVolumeSpecName: "config-data") pod "3788c701-44fb-4959-b69e-920899cd9daa" (UID: "3788c701-44fb-4959-b69e-920899cd9daa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:02:59 crc kubenswrapper[5002]: I1203 18:02:59.946261 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3788c701-44fb-4959-b69e-920899cd9daa" (UID: "3788c701-44fb-4959-b69e-920899cd9daa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.022986 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqqk4\" (UniqueName: \"kubernetes.io/projected/3788c701-44fb-4959-b69e-920899cd9daa-kube-api-access-hqqk4\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.023033 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.023046 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.023058 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.023069 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.023081 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3788c701-44fb-4959-b69e-920899cd9daa-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.398241 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fqlww" event={"ID":"3788c701-44fb-4959-b69e-920899cd9daa","Type":"ContainerDied","Data":"79b241f16d9b41414a7cdcefc99d877c43591676689e1047173fc084b9e2ae15"} Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.398276 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fqlww" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.398293 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79b241f16d9b41414a7cdcefc99d877c43591676689e1047173fc084b9e2ae15" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.506211 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fqlww"] Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.514540 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fqlww"] Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.569027 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tj5bt"] Dec 03 18:03:00 crc kubenswrapper[5002]: E1203 18:03:00.569449 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3788c701-44fb-4959-b69e-920899cd9daa" containerName="keystone-bootstrap" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.569473 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3788c701-44fb-4959-b69e-920899cd9daa" containerName="keystone-bootstrap" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.569649 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3788c701-44fb-4959-b69e-920899cd9daa" containerName="keystone-bootstrap" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.570289 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.574183 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.574523 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.574553 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hpchv" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.575575 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.575804 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.580379 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tj5bt"] Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.632313 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-fernet-keys\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.632383 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-scripts\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.632465 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-combined-ca-bundle\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.632640 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-config-data\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.632889 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-credential-keys\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.632954 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5ckj\" (UniqueName: \"kubernetes.io/projected/2808c3ea-5a81-4602-8353-bee8f8d8c453-kube-api-access-n5ckj\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.734381 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5ckj\" (UniqueName: \"kubernetes.io/projected/2808c3ea-5a81-4602-8353-bee8f8d8c453-kube-api-access-n5ckj\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.734460 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-fernet-keys\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.734488 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-scripts\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.734549 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-combined-ca-bundle\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.734584 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-config-data\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.734618 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-credential-keys\") 
pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.738805 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-credential-keys\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.739091 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-config-data\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.739141 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-combined-ca-bundle\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.739560 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-scripts\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.739916 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-fernet-keys\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.752287 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5ckj\" (UniqueName: \"kubernetes.io/projected/2808c3ea-5a81-4602-8353-bee8f8d8c453-kube-api-access-n5ckj\") pod \"keystone-bootstrap-tj5bt\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.853053 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3788c701-44fb-4959-b69e-920899cd9daa" path="/var/lib/kubelet/pods/3788c701-44fb-4959-b69e-920899cd9daa/volumes" Dec 03 18:03:00 crc kubenswrapper[5002]: I1203 18:03:00.886114 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:01 crc kubenswrapper[5002]: I1203 18:03:01.346200 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tj5bt"] Dec 03 18:03:01 crc kubenswrapper[5002]: W1203 18:03:01.364447 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2808c3ea_5a81_4602_8353_bee8f8d8c453.slice/crio-13e9c19b06c6eb41238196aef8f5363a30681b596752668501a2262d5e4ac1c9 WatchSource:0}: Error finding container 13e9c19b06c6eb41238196aef8f5363a30681b596752668501a2262d5e4ac1c9: Status 404 returned error can't find the container with id 13e9c19b06c6eb41238196aef8f5363a30681b596752668501a2262d5e4ac1c9 Dec 03 18:03:01 crc kubenswrapper[5002]: I1203 18:03:01.409065 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tj5bt" event={"ID":"2808c3ea-5a81-4602-8353-bee8f8d8c453","Type":"ContainerStarted","Data":"13e9c19b06c6eb41238196aef8f5363a30681b596752668501a2262d5e4ac1c9"} Dec 03 18:03:02 crc kubenswrapper[5002]: I1203 18:03:02.418255 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tj5bt" event={"ID":"2808c3ea-5a81-4602-8353-bee8f8d8c453","Type":"ContainerStarted","Data":"43f285c3ff2359576a27562ea0191bb46301e83ee4e0161a8b0361bf6ce08d0e"} Dec 03 18:03:02 crc kubenswrapper[5002]: I1203 18:03:02.439569 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tj5bt" podStartSLOduration=2.439544858 podStartE2EDuration="2.439544858s" podCreationTimestamp="2025-12-03 18:03:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:03:02.438389297 +0000 UTC m=+5505.852211215" watchObservedRunningTime="2025-12-03 18:03:02.439544858 +0000 UTC m=+5505.853366746" Dec 03 18:03:02 crc kubenswrapper[5002]: I1203 18:03:02.938969 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.028452 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59745b6b7-sgs9m"] Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.030387 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerName="dnsmasq-dns" containerID="cri-o://19093cbffd37b47fb097bf5af5c5fd133eef1f8378bacd19f97a5bf125abaf1e" gracePeriod=10 Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.434583 5002 generic.go:334] "Generic (PLEG): container finished" podID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerID="19093cbffd37b47fb097bf5af5c5fd133eef1f8378bacd19f97a5bf125abaf1e" exitCode=0 Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.435711 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" event={"ID":"c330ffba-d5bd-4165-bbd8-d39285920be0","Type":"ContainerDied","Data":"19093cbffd37b47fb097bf5af5c5fd133eef1f8378bacd19f97a5bf125abaf1e"} Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.435770 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" event={"ID":"c330ffba-d5bd-4165-bbd8-d39285920be0","Type":"ContainerDied","Data":"940227b974aa5e1b7b0218aaa6592f5e369bae12053fd1805eab3d29787c9313"} Dec 03 18:03:03 
crc kubenswrapper[5002]: I1203 18:03:03.435790 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="940227b974aa5e1b7b0218aaa6592f5e369bae12053fd1805eab3d29787c9313" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.471985 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.586224 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnhlz\" (UniqueName: \"kubernetes.io/projected/c330ffba-d5bd-4165-bbd8-d39285920be0-kube-api-access-lnhlz\") pod \"c330ffba-d5bd-4165-bbd8-d39285920be0\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.586320 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-dns-svc\") pod \"c330ffba-d5bd-4165-bbd8-d39285920be0\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.586416 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-sb\") pod \"c330ffba-d5bd-4165-bbd8-d39285920be0\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.586565 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-config\") pod \"c330ffba-d5bd-4165-bbd8-d39285920be0\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.586596 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-nb\") pod \"c330ffba-d5bd-4165-bbd8-d39285920be0\" (UID: \"c330ffba-d5bd-4165-bbd8-d39285920be0\") " Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.597677 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c330ffba-d5bd-4165-bbd8-d39285920be0-kube-api-access-lnhlz" (OuterVolumeSpecName: "kube-api-access-lnhlz") pod "c330ffba-d5bd-4165-bbd8-d39285920be0" (UID: "c330ffba-d5bd-4165-bbd8-d39285920be0"). InnerVolumeSpecName "kube-api-access-lnhlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.625520 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-config" (OuterVolumeSpecName: "config") pod "c330ffba-d5bd-4165-bbd8-d39285920be0" (UID: "c330ffba-d5bd-4165-bbd8-d39285920be0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.633049 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c330ffba-d5bd-4165-bbd8-d39285920be0" (UID: "c330ffba-d5bd-4165-bbd8-d39285920be0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.637436 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c330ffba-d5bd-4165-bbd8-d39285920be0" (UID: "c330ffba-d5bd-4165-bbd8-d39285920be0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.638435 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c330ffba-d5bd-4165-bbd8-d39285920be0" (UID: "c330ffba-d5bd-4165-bbd8-d39285920be0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.688496 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.688536 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.688553 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnhlz\" (UniqueName: \"kubernetes.io/projected/c330ffba-d5bd-4165-bbd8-d39285920be0-kube-api-access-lnhlz\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.688564 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:03 crc kubenswrapper[5002]: I1203 18:03:03.688573 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c330ffba-d5bd-4165-bbd8-d39285920be0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:04 crc kubenswrapper[5002]: I1203 18:03:04.446465 5002 generic.go:334] "Generic (PLEG): container finished" podID="2808c3ea-5a81-4602-8353-bee8f8d8c453" containerID="43f285c3ff2359576a27562ea0191bb46301e83ee4e0161a8b0361bf6ce08d0e" exitCode=0 Dec 03 18:03:04 crc kubenswrapper[5002]: I1203 18:03:04.446543 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tj5bt" event={"ID":"2808c3ea-5a81-4602-8353-bee8f8d8c453","Type":"ContainerDied","Data":"43f285c3ff2359576a27562ea0191bb46301e83ee4e0161a8b0361bf6ce08d0e"} Dec 03 18:03:04 crc kubenswrapper[5002]: I1203 18:03:04.446772 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59745b6b7-sgs9m" Dec 03 18:03:04 crc kubenswrapper[5002]: I1203 18:03:04.491363 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59745b6b7-sgs9m"] Dec 03 18:03:04 crc kubenswrapper[5002]: I1203 18:03:04.500004 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59745b6b7-sgs9m"] Dec 03 18:03:04 crc kubenswrapper[5002]: I1203 18:03:04.852951 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" path="/var/lib/kubelet/pods/c330ffba-d5bd-4165-bbd8-d39285920be0/volumes" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.820008 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.930893 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5ckj\" (UniqueName: \"kubernetes.io/projected/2808c3ea-5a81-4602-8353-bee8f8d8c453-kube-api-access-n5ckj\") pod \"2808c3ea-5a81-4602-8353-bee8f8d8c453\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.931013 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-config-data\") pod \"2808c3ea-5a81-4602-8353-bee8f8d8c453\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.931040 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-fernet-keys\") pod \"2808c3ea-5a81-4602-8353-bee8f8d8c453\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.931077 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-combined-ca-bundle\") pod \"2808c3ea-5a81-4602-8353-bee8f8d8c453\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.931136 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-scripts\") pod \"2808c3ea-5a81-4602-8353-bee8f8d8c453\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.931151 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-credential-keys\") pod \"2808c3ea-5a81-4602-8353-bee8f8d8c453\" (UID: \"2808c3ea-5a81-4602-8353-bee8f8d8c453\") " Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.945451 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2808c3ea-5a81-4602-8353-bee8f8d8c453" (UID: "2808c3ea-5a81-4602-8353-bee8f8d8c453"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.946895 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2808c3ea-5a81-4602-8353-bee8f8d8c453" (UID: "2808c3ea-5a81-4602-8353-bee8f8d8c453"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.950106 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-scripts" (OuterVolumeSpecName: "scripts") pod "2808c3ea-5a81-4602-8353-bee8f8d8c453" (UID: "2808c3ea-5a81-4602-8353-bee8f8d8c453"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.950932 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2808c3ea-5a81-4602-8353-bee8f8d8c453-kube-api-access-n5ckj" (OuterVolumeSpecName: "kube-api-access-n5ckj") pod "2808c3ea-5a81-4602-8353-bee8f8d8c453" (UID: "2808c3ea-5a81-4602-8353-bee8f8d8c453"). InnerVolumeSpecName "kube-api-access-n5ckj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.974440 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2808c3ea-5a81-4602-8353-bee8f8d8c453" (UID: "2808c3ea-5a81-4602-8353-bee8f8d8c453"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:03:05 crc kubenswrapper[5002]: I1203 18:03:05.986161 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-config-data" (OuterVolumeSpecName: "config-data") pod "2808c3ea-5a81-4602-8353-bee8f8d8c453" (UID: "2808c3ea-5a81-4602-8353-bee8f8d8c453"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.033093 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5ckj\" (UniqueName: \"kubernetes.io/projected/2808c3ea-5a81-4602-8353-bee8f8d8c453-kube-api-access-n5ckj\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.033131 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.033140 5002 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.033148 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.033156 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.033164 5002 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2808c3ea-5a81-4602-8353-bee8f8d8c453-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.466183 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tj5bt" event={"ID":"2808c3ea-5a81-4602-8353-bee8f8d8c453","Type":"ContainerDied","Data":"13e9c19b06c6eb41238196aef8f5363a30681b596752668501a2262d5e4ac1c9"} Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.466490 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13e9c19b06c6eb41238196aef8f5363a30681b596752668501a2262d5e4ac1c9" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.466322 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tj5bt" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.557419 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-75d97dc8b-p2q9t"] Dec 03 18:03:06 crc kubenswrapper[5002]: E1203 18:03:06.558032 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerName="init" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.558057 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerName="init" Dec 03 18:03:06 crc kubenswrapper[5002]: E1203 18:03:06.558104 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerName="dnsmasq-dns" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.558115 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerName="dnsmasq-dns" Dec 03 18:03:06 crc kubenswrapper[5002]: E1203 18:03:06.558136 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2808c3ea-5a81-4602-8353-bee8f8d8c453" containerName="keystone-bootstrap" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.558146 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2808c3ea-5a81-4602-8353-bee8f8d8c453" containerName="keystone-bootstrap" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.558431 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c330ffba-d5bd-4165-bbd8-d39285920be0" containerName="dnsmasq-dns" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.558459 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2808c3ea-5a81-4602-8353-bee8f8d8c453" containerName="keystone-bootstrap" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.559358 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.562880 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.563214 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.563585 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hpchv" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.564133 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.564572 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.567179 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.567772 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-75d97dc8b-p2q9t"] Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746470 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-config-data\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746521 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-scripts\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746550 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-internal-tls-certs\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746606 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-combined-ca-bundle\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746632 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-fernet-keys\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746662 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-credential-keys\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") 
" pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746819 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-public-tls-certs\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.746928 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bvxf\" (UniqueName: \"kubernetes.io/projected/b74fe12c-0452-40fb-be2f-6d7024507a34-kube-api-access-7bvxf\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.847893 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-combined-ca-bundle\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.847933 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-fernet-keys\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.847964 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-credential-keys\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.848004 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-public-tls-certs\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.848028 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bvxf\" (UniqueName: \"kubernetes.io/projected/b74fe12c-0452-40fb-be2f-6d7024507a34-kube-api-access-7bvxf\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.848086 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-config-data\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.848111 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-scripts\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: 
I1203 18:03:06.848132 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-internal-tls-certs\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.852907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-public-tls-certs\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.853059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-combined-ca-bundle\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.853666 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-internal-tls-certs\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.857714 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-scripts\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.857790 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-credential-keys\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.858167 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-config-data\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.865663 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74fe12c-0452-40fb-be2f-6d7024507a34-fernet-keys\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.869280 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bvxf\" (UniqueName: \"kubernetes.io/projected/b74fe12c-0452-40fb-be2f-6d7024507a34-kube-api-access-7bvxf\") pod \"keystone-75d97dc8b-p2q9t\" (UID: \"b74fe12c-0452-40fb-be2f-6d7024507a34\") " pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:06 crc kubenswrapper[5002]: I1203 18:03:06.883815 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:07 crc kubenswrapper[5002]: I1203 18:03:07.328901 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-75d97dc8b-p2q9t"] Dec 03 18:03:07 crc kubenswrapper[5002]: I1203 18:03:07.475712 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-75d97dc8b-p2q9t" event={"ID":"b74fe12c-0452-40fb-be2f-6d7024507a34","Type":"ContainerStarted","Data":"e4522cb6b979980b7bb79bf606e499857f98cefd25b7af061b58d3257ba583ea"} Dec 03 18:03:08 crc kubenswrapper[5002]: I1203 18:03:08.488614 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-75d97dc8b-p2q9t" event={"ID":"b74fe12c-0452-40fb-be2f-6d7024507a34","Type":"ContainerStarted","Data":"269e100be7087eab7142eb99d09953558cbbd8288110c097ca6943c5c29fe52c"} Dec 03 18:03:08 crc kubenswrapper[5002]: I1203 18:03:08.489134 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:08 crc kubenswrapper[5002]: I1203 18:03:08.519376 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-75d97dc8b-p2q9t" podStartSLOduration=2.519353932 podStartE2EDuration="2.519353932s" podCreationTimestamp="2025-12-03 18:03:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:03:08.515936839 +0000 UTC m=+5511.929758797" watchObservedRunningTime="2025-12-03 18:03:08.519353932 +0000 UTC m=+5511.933175820" Dec 03 18:03:11 crc kubenswrapper[5002]: I1203 18:03:11.841196 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:03:11 crc kubenswrapper[5002]: E1203 18:03:11.841681 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:03:23 crc kubenswrapper[5002]: I1203 18:03:23.841479 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:03:23 crc kubenswrapper[5002]: E1203 18:03:23.842769 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:03:37 crc kubenswrapper[5002]: I1203 18:03:37.840666 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:03:37 crc kubenswrapper[5002]: E1203 18:03:37.841486 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:03:38 crc kubenswrapper[5002]: I1203 18:03:38.472327 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-75d97dc8b-p2q9t" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.381131 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.386572 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.391139 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.391294 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.391703 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-jznfr" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.405316 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.498056 5002 scope.go:117] "RemoveContainer" containerID="f69e97e7cca483b1f1e2d0d7bd7fb2bafc4e541164ce4ba45ed423c1bfd7b867" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.523482 5002 scope.go:117] "RemoveContainer" containerID="5fab71267d08749ccd0eb51b2e4d13d376055a5d826daf8bd3b316ab390e0345" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.560664 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-combined-ca-bundle\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.560903 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svjlt\" (UniqueName: \"kubernetes.io/projected/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-kube-api-access-svjlt\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.561011 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.561060 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config-secret\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.581404 5002 scope.go:117] "RemoveContainer" containerID="73574fec0559724b7855921401f25bf483b23da9a758d8cc8b2e1e2788edd7ce" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.626250 5002 scope.go:117] "RemoveContainer" 
containerID="6bff7622be464a398ae2990bf6dde7f63430fa018017eaadd417e8f76b73bdfd" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.663030 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.663150 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config-secret\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.663375 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-combined-ca-bundle\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.663514 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svjlt\" (UniqueName: \"kubernetes.io/projected/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-kube-api-access-svjlt\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.667991 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.680097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-combined-ca-bundle\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.681495 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config-secret\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.685086 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svjlt\" (UniqueName: \"kubernetes.io/projected/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-kube-api-access-svjlt\") pod \"openstackclient\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.687090 5002 scope.go:117] "RemoveContainer" containerID="38ea96ee4013ce1f9ebe3eff1a886f4146f4be30b9ddb3981021ab3593cabd4e" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.727905 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.749670 5002 scope.go:117] "RemoveContainer" containerID="6270943a8ab633596e0efde623b5328e9a7af8774de52a69dbb6ccd69815ed40" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.793889 5002 scope.go:117] "RemoveContainer" containerID="bdf7c0980e78cc578e85804716aa6ae7b31dbbc98ab902ce185b016f67a2e836" Dec 03 18:03:41 crc kubenswrapper[5002]: I1203 18:03:41.825455 5002 scope.go:117] "RemoveContainer" containerID="2ac1ba3163a6ab8315ff5c1742b68a316e2db2e06c4031534b4a211041ba7d42" Dec 03 18:03:42 crc kubenswrapper[5002]: I1203 18:03:42.266281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 18:03:42 crc kubenswrapper[5002]: I1203 18:03:42.742950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686","Type":"ContainerStarted","Data":"27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577"} Dec 03 18:03:42 crc kubenswrapper[5002]: I1203 18:03:42.743006 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686","Type":"ContainerStarted","Data":"845d22a134620dba416fd8d2e183c993881bfc5fd8bf7bbe25f1f8fc701a6e65"} Dec 03 18:03:42 crc kubenswrapper[5002]: I1203 18:03:42.764647 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.764623978 podStartE2EDuration="1.764623978s" podCreationTimestamp="2025-12-03 18:03:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:03:42.760252181 +0000 UTC m=+5546.174074059" watchObservedRunningTime="2025-12-03 18:03:42.764623978 +0000 UTC m=+5546.178445866" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.708027 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z7h6v"] Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.710611 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.721990 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z7h6v"] Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.790095 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-catalog-content\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.790186 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tqj6\" (UniqueName: \"kubernetes.io/projected/f45221a0-784c-45d4-903f-f1da4d968066-kube-api-access-4tqj6\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.790436 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-utilities\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.891958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-utilities\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.892066 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-catalog-content\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.892100 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tqj6\" (UniqueName: \"kubernetes.io/projected/f45221a0-784c-45d4-903f-f1da4d968066-kube-api-access-4tqj6\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.892506 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-utilities\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.892682 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-catalog-content\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:47 crc kubenswrapper[5002]: I1203 18:03:47.914945 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4tqj6\" (UniqueName: \"kubernetes.io/projected/f45221a0-784c-45d4-903f-f1da4d968066-kube-api-access-4tqj6\") pod \"redhat-operators-z7h6v\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:48 crc kubenswrapper[5002]: I1203 18:03:48.044443 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:48 crc kubenswrapper[5002]: I1203 18:03:48.327179 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z7h6v"] Dec 03 18:03:48 crc kubenswrapper[5002]: I1203 18:03:48.813391 5002 generic.go:334] "Generic (PLEG): container finished" podID="f45221a0-784c-45d4-903f-f1da4d968066" containerID="cc16e43b6f87f4e584de8f1c8035f1f3ce093847ae3312ce562d61609165306a" exitCode=0 Dec 03 18:03:48 crc kubenswrapper[5002]: I1203 18:03:48.813654 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z7h6v" event={"ID":"f45221a0-784c-45d4-903f-f1da4d968066","Type":"ContainerDied","Data":"cc16e43b6f87f4e584de8f1c8035f1f3ce093847ae3312ce562d61609165306a"} Dec 03 18:03:48 crc kubenswrapper[5002]: I1203 18:03:48.814798 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z7h6v" event={"ID":"f45221a0-784c-45d4-903f-f1da4d968066","Type":"ContainerStarted","Data":"8c91678a54cc8f78dc5e191cb826075a7315e5526fb299a0604612f9af508f9c"} Dec 03 18:03:50 crc kubenswrapper[5002]: I1203 18:03:50.833901 5002 generic.go:334] "Generic (PLEG): container finished" podID="f45221a0-784c-45d4-903f-f1da4d968066" containerID="4b2c80a934f8180c2c47236abe0b7641eb1d2191a4f3e2c5fd68e426869ae5e9" exitCode=0 Dec 03 18:03:50 crc kubenswrapper[5002]: I1203 18:03:50.834035 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z7h6v" event={"ID":"f45221a0-784c-45d4-903f-f1da4d968066","Type":"ContainerDied","Data":"4b2c80a934f8180c2c47236abe0b7641eb1d2191a4f3e2c5fd68e426869ae5e9"} Dec 03 18:03:51 crc kubenswrapper[5002]: I1203 18:03:51.844524 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z7h6v" event={"ID":"f45221a0-784c-45d4-903f-f1da4d968066","Type":"ContainerStarted","Data":"12ff264ac4ceafa9ca92bd637e2f7a5f20401f909387a6980574d745d8eb5920"} Dec 03 18:03:51 crc kubenswrapper[5002]: I1203 18:03:51.868613 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z7h6v" podStartSLOduration=2.047969026 podStartE2EDuration="4.868592399s" podCreationTimestamp="2025-12-03 18:03:47 +0000 UTC" firstStartedPulling="2025-12-03 18:03:48.815276656 +0000 UTC m=+5552.229098544" lastFinishedPulling="2025-12-03 18:03:51.635900019 +0000 UTC m=+5555.049721917" observedRunningTime="2025-12-03 18:03:51.861829927 +0000 UTC m=+5555.275651815" watchObservedRunningTime="2025-12-03 18:03:51.868592399 +0000 UTC m=+5555.282414287" Dec 03 18:03:52 crc kubenswrapper[5002]: I1203 18:03:52.841340 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:03:52 crc kubenswrapper[5002]: E1203 18:03:52.841670 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:03:58 crc kubenswrapper[5002]: I1203 18:03:58.045251 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:58 crc kubenswrapper[5002]: I1203 18:03:58.046704 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:58 crc kubenswrapper[5002]: I1203 18:03:58.122738 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:58 crc kubenswrapper[5002]: I1203 18:03:58.971496 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:03:59 crc kubenswrapper[5002]: I1203 18:03:59.027993 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z7h6v"] Dec 03 18:04:00 crc kubenswrapper[5002]: I1203 18:04:00.937301 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z7h6v" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="registry-server" containerID="cri-o://12ff264ac4ceafa9ca92bd637e2f7a5f20401f909387a6980574d745d8eb5920" gracePeriod=2 Dec 03 18:04:04 crc kubenswrapper[5002]: I1203 18:04:04.976056 5002 generic.go:334] "Generic (PLEG): container finished" podID="f45221a0-784c-45d4-903f-f1da4d968066" containerID="12ff264ac4ceafa9ca92bd637e2f7a5f20401f909387a6980574d745d8eb5920" exitCode=0 Dec 03 18:04:04 crc kubenswrapper[5002]: I1203 18:04:04.976205 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z7h6v" event={"ID":"f45221a0-784c-45d4-903f-f1da4d968066","Type":"ContainerDied","Data":"12ff264ac4ceafa9ca92bd637e2f7a5f20401f909387a6980574d745d8eb5920"} Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.107799 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.201082 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tqj6\" (UniqueName: \"kubernetes.io/projected/f45221a0-784c-45d4-903f-f1da4d968066-kube-api-access-4tqj6\") pod \"f45221a0-784c-45d4-903f-f1da4d968066\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.201176 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-utilities\") pod \"f45221a0-784c-45d4-903f-f1da4d968066\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.201272 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-catalog-content\") pod \"f45221a0-784c-45d4-903f-f1da4d968066\" (UID: \"f45221a0-784c-45d4-903f-f1da4d968066\") " Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.202919 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-utilities" (OuterVolumeSpecName: "utilities") pod "f45221a0-784c-45d4-903f-f1da4d968066" (UID: "f45221a0-784c-45d4-903f-f1da4d968066"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.207998 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f45221a0-784c-45d4-903f-f1da4d968066-kube-api-access-4tqj6" (OuterVolumeSpecName: "kube-api-access-4tqj6") pod "f45221a0-784c-45d4-903f-f1da4d968066" (UID: "f45221a0-784c-45d4-903f-f1da4d968066"). InnerVolumeSpecName "kube-api-access-4tqj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.303317 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tqj6\" (UniqueName: \"kubernetes.io/projected/f45221a0-784c-45d4-903f-f1da4d968066-kube-api-access-4tqj6\") on node \"crc\" DevicePath \"\"" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.303360 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.324001 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f45221a0-784c-45d4-903f-f1da4d968066" (UID: "f45221a0-784c-45d4-903f-f1da4d968066"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.404701 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45221a0-784c-45d4-903f-f1da4d968066-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.987683 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z7h6v" event={"ID":"f45221a0-784c-45d4-903f-f1da4d968066","Type":"ContainerDied","Data":"8c91678a54cc8f78dc5e191cb826075a7315e5526fb299a0604612f9af508f9c"} Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.987736 5002 scope.go:117] "RemoveContainer" containerID="12ff264ac4ceafa9ca92bd637e2f7a5f20401f909387a6980574d745d8eb5920" Dec 03 18:04:05 crc kubenswrapper[5002]: I1203 18:04:05.988735 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z7h6v" Dec 03 18:04:06 crc kubenswrapper[5002]: I1203 18:04:06.011848 5002 scope.go:117] "RemoveContainer" containerID="4b2c80a934f8180c2c47236abe0b7641eb1d2191a4f3e2c5fd68e426869ae5e9" Dec 03 18:04:06 crc kubenswrapper[5002]: I1203 18:04:06.046587 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z7h6v"] Dec 03 18:04:06 crc kubenswrapper[5002]: I1203 18:04:06.059032 5002 scope.go:117] "RemoveContainer" containerID="cc16e43b6f87f4e584de8f1c8035f1f3ce093847ae3312ce562d61609165306a" Dec 03 18:04:06 crc kubenswrapper[5002]: I1203 18:04:06.067301 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z7h6v"] Dec 03 18:04:06 crc kubenswrapper[5002]: I1203 18:04:06.840832 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:04:06 crc kubenswrapper[5002]: E1203 18:04:06.841654 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:04:06 crc kubenswrapper[5002]: I1203 18:04:06.852962 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f45221a0-784c-45d4-903f-f1da4d968066" path="/var/lib/kubelet/pods/f45221a0-784c-45d4-903f-f1da4d968066/volumes" Dec 03 18:04:18 crc kubenswrapper[5002]: I1203 18:04:18.841584 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:04:18 crc kubenswrapper[5002]: E1203 18:04:18.842798 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:04:32 crc kubenswrapper[5002]: I1203 18:04:32.843982 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f" Dec 03 18:04:33 crc kubenswrapper[5002]: I1203 18:04:33.215679 
5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"f5d0d4d14dd7dac7b94bb6ad5c76c5a78375d0dbd75748edfcbc430636410612"} Dec 03 18:04:57 crc kubenswrapper[5002]: E1203 18:04:57.394600 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.155:41226->38.102.83.155:42635: write tcp 38.102.83.155:41226->38.102.83.155:42635: write: broken pipe Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.332729 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-fdnpl"] Dec 03 18:05:20 crc kubenswrapper[5002]: E1203 18:05:20.333629 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="extract-utilities" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.333645 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="extract-utilities" Dec 03 18:05:20 crc kubenswrapper[5002]: E1203 18:05:20.333663 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="registry-server" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.333669 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="registry-server" Dec 03 18:05:20 crc kubenswrapper[5002]: E1203 18:05:20.333682 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="extract-content" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.333689 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="extract-content" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.333884 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f45221a0-784c-45d4-903f-f1da4d968066" containerName="registry-server" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.334423 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.344038 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fdnpl"] Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.415370 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-a1a9-account-create-update-9pdzb"] Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.416360 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.417969 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.427915 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a1a9-account-create-update-9pdzb"] Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.456469 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec77f3a-6569-4f1e-972a-004d6511941c-operator-scripts\") pod \"barbican-db-create-fdnpl\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.456529 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxvvw\" (UniqueName: \"kubernetes.io/projected/eec77f3a-6569-4f1e-972a-004d6511941c-kube-api-access-mxvvw\") pod \"barbican-db-create-fdnpl\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.558461 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxvvw\" (UniqueName: \"kubernetes.io/projected/eec77f3a-6569-4f1e-972a-004d6511941c-kube-api-access-mxvvw\") pod \"barbican-db-create-fdnpl\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.558620 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673ac43c-540d-4675-bcd2-328da5e9a17e-operator-scripts\") pod \"barbican-a1a9-account-create-update-9pdzb\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.558652 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec77f3a-6569-4f1e-972a-004d6511941c-operator-scripts\") pod \"barbican-db-create-fdnpl\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.559346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp25h\" (UniqueName: \"kubernetes.io/projected/673ac43c-540d-4675-bcd2-328da5e9a17e-kube-api-access-zp25h\") pod \"barbican-a1a9-account-create-update-9pdzb\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.560140 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec77f3a-6569-4f1e-972a-004d6511941c-operator-scripts\") pod \"barbican-db-create-fdnpl\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.582726 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxvvw\" (UniqueName: \"kubernetes.io/projected/eec77f3a-6569-4f1e-972a-004d6511941c-kube-api-access-mxvvw\") pod 
\"barbican-db-create-fdnpl\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.653333 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.661355 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673ac43c-540d-4675-bcd2-328da5e9a17e-operator-scripts\") pod \"barbican-a1a9-account-create-update-9pdzb\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.661414 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp25h\" (UniqueName: \"kubernetes.io/projected/673ac43c-540d-4675-bcd2-328da5e9a17e-kube-api-access-zp25h\") pod \"barbican-a1a9-account-create-update-9pdzb\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.662604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673ac43c-540d-4675-bcd2-328da5e9a17e-operator-scripts\") pod \"barbican-a1a9-account-create-update-9pdzb\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.680533 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp25h\" (UniqueName: \"kubernetes.io/projected/673ac43c-540d-4675-bcd2-328da5e9a17e-kube-api-access-zp25h\") pod \"barbican-a1a9-account-create-update-9pdzb\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.741642 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:20 crc kubenswrapper[5002]: I1203 18:05:20.933116 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fdnpl"] Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.095099 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a1a9-account-create-update-9pdzb"] Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.628265 5002 generic.go:334] "Generic (PLEG): container finished" podID="673ac43c-540d-4675-bcd2-328da5e9a17e" containerID="50df0b2db5432a8718c0d2498dc56ecfb3f9a08b76ba24c685c2b85b57680ce1" exitCode=0 Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.628376 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a1a9-account-create-update-9pdzb" event={"ID":"673ac43c-540d-4675-bcd2-328da5e9a17e","Type":"ContainerDied","Data":"50df0b2db5432a8718c0d2498dc56ecfb3f9a08b76ba24c685c2b85b57680ce1"} Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.628434 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a1a9-account-create-update-9pdzb" event={"ID":"673ac43c-540d-4675-bcd2-328da5e9a17e","Type":"ContainerStarted","Data":"49e9b35a27bfd8deef0ae2977a222852c3e4091c218808a54aa6e3dfbaa63126"} Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.631239 5002 generic.go:334] "Generic (PLEG): container finished" podID="eec77f3a-6569-4f1e-972a-004d6511941c" containerID="8c145f7f66a01193762b70226f1c1a68465edbaf58e20bb2358f8e36e342900e" exitCode=0 Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.631296 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fdnpl" event={"ID":"eec77f3a-6569-4f1e-972a-004d6511941c","Type":"ContainerDied","Data":"8c145f7f66a01193762b70226f1c1a68465edbaf58e20bb2358f8e36e342900e"} Dec 03 18:05:21 crc kubenswrapper[5002]: I1203 18:05:21.631334 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fdnpl" event={"ID":"eec77f3a-6569-4f1e-972a-004d6511941c","Type":"ContainerStarted","Data":"2304a9ad943074d9fa80bbd3c52491e5c7581b29c91a3d5243353d0edfcf8e5e"} Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.027240 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.033252 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.204354 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp25h\" (UniqueName: \"kubernetes.io/projected/673ac43c-540d-4675-bcd2-328da5e9a17e-kube-api-access-zp25h\") pod \"673ac43c-540d-4675-bcd2-328da5e9a17e\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.204411 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxvvw\" (UniqueName: \"kubernetes.io/projected/eec77f3a-6569-4f1e-972a-004d6511941c-kube-api-access-mxvvw\") pod \"eec77f3a-6569-4f1e-972a-004d6511941c\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.204434 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673ac43c-540d-4675-bcd2-328da5e9a17e-operator-scripts\") pod \"673ac43c-540d-4675-bcd2-328da5e9a17e\" (UID: \"673ac43c-540d-4675-bcd2-328da5e9a17e\") " Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.204491 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec77f3a-6569-4f1e-972a-004d6511941c-operator-scripts\") pod \"eec77f3a-6569-4f1e-972a-004d6511941c\" (UID: \"eec77f3a-6569-4f1e-972a-004d6511941c\") " Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.205615 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eec77f3a-6569-4f1e-972a-004d6511941c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eec77f3a-6569-4f1e-972a-004d6511941c" (UID: "eec77f3a-6569-4f1e-972a-004d6511941c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.205688 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/673ac43c-540d-4675-bcd2-328da5e9a17e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "673ac43c-540d-4675-bcd2-328da5e9a17e" (UID: "673ac43c-540d-4675-bcd2-328da5e9a17e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.210446 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/673ac43c-540d-4675-bcd2-328da5e9a17e-kube-api-access-zp25h" (OuterVolumeSpecName: "kube-api-access-zp25h") pod "673ac43c-540d-4675-bcd2-328da5e9a17e" (UID: "673ac43c-540d-4675-bcd2-328da5e9a17e"). InnerVolumeSpecName "kube-api-access-zp25h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.210572 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eec77f3a-6569-4f1e-972a-004d6511941c-kube-api-access-mxvvw" (OuterVolumeSpecName: "kube-api-access-mxvvw") pod "eec77f3a-6569-4f1e-972a-004d6511941c" (UID: "eec77f3a-6569-4f1e-972a-004d6511941c"). InnerVolumeSpecName "kube-api-access-mxvvw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.306053 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp25h\" (UniqueName: \"kubernetes.io/projected/673ac43c-540d-4675-bcd2-328da5e9a17e-kube-api-access-zp25h\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.306081 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxvvw\" (UniqueName: \"kubernetes.io/projected/eec77f3a-6569-4f1e-972a-004d6511941c-kube-api-access-mxvvw\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.306090 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673ac43c-540d-4675-bcd2-328da5e9a17e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.306099 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec77f3a-6569-4f1e-972a-004d6511941c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.649861 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a1a9-account-create-update-9pdzb" event={"ID":"673ac43c-540d-4675-bcd2-328da5e9a17e","Type":"ContainerDied","Data":"49e9b35a27bfd8deef0ae2977a222852c3e4091c218808a54aa6e3dfbaa63126"} Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.649916 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49e9b35a27bfd8deef0ae2977a222852c3e4091c218808a54aa6e3dfbaa63126" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.649869 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a1a9-account-create-update-9pdzb" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.651311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fdnpl" event={"ID":"eec77f3a-6569-4f1e-972a-004d6511941c","Type":"ContainerDied","Data":"2304a9ad943074d9fa80bbd3c52491e5c7581b29c91a3d5243353d0edfcf8e5e"} Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.651358 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2304a9ad943074d9fa80bbd3c52491e5c7581b29c91a3d5243353d0edfcf8e5e" Dec 03 18:05:23 crc kubenswrapper[5002]: I1203 18:05:23.651360 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-fdnpl" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.688550 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-pb6mc"] Dec 03 18:05:25 crc kubenswrapper[5002]: E1203 18:05:25.688958 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="673ac43c-540d-4675-bcd2-328da5e9a17e" containerName="mariadb-account-create-update" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.688973 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="673ac43c-540d-4675-bcd2-328da5e9a17e" containerName="mariadb-account-create-update" Dec 03 18:05:25 crc kubenswrapper[5002]: E1203 18:05:25.688998 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec77f3a-6569-4f1e-972a-004d6511941c" containerName="mariadb-database-create" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.689007 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec77f3a-6569-4f1e-972a-004d6511941c" containerName="mariadb-database-create" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.689220 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="673ac43c-540d-4675-bcd2-328da5e9a17e" containerName="mariadb-account-create-update" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.689247 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec77f3a-6569-4f1e-972a-004d6511941c" containerName="mariadb-database-create" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.689954 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.692544 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.693704 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-j8884" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.702085 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-pb6mc"] Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.749533 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-combined-ca-bundle\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.749827 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7wx2\" (UniqueName: \"kubernetes.io/projected/4bf679aa-747a-461b-9ead-521e718c9bdd-kube-api-access-k7wx2\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.749916 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-db-sync-config-data\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.851542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-combined-ca-bundle\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.852075 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7wx2\" (UniqueName: \"kubernetes.io/projected/4bf679aa-747a-461b-9ead-521e718c9bdd-kube-api-access-k7wx2\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.852382 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-db-sync-config-data\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.856301 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-db-sync-config-data\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.867505 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-combined-ca-bundle\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:25 crc kubenswrapper[5002]: I1203 18:05:25.868335 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7wx2\" (UniqueName: \"kubernetes.io/projected/4bf679aa-747a-461b-9ead-521e718c9bdd-kube-api-access-k7wx2\") pod \"barbican-db-sync-pb6mc\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:26 crc kubenswrapper[5002]: I1203 18:05:26.009960 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:26 crc kubenswrapper[5002]: I1203 18:05:26.542652 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-pb6mc"] Dec 03 18:05:26 crc kubenswrapper[5002]: I1203 18:05:26.677273 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pb6mc" event={"ID":"4bf679aa-747a-461b-9ead-521e718c9bdd","Type":"ContainerStarted","Data":"f1a71f7ecd6c8761afb121f280412fc203f02a5b64f335506d3249f64ef764a2"} Dec 03 18:05:27 crc kubenswrapper[5002]: I1203 18:05:27.688523 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pb6mc" event={"ID":"4bf679aa-747a-461b-9ead-521e718c9bdd","Type":"ContainerStarted","Data":"3403585b659d9fb9dadb1df3e0ec0a35ee461b510368829c065d843322e8d231"} Dec 03 18:05:27 crc kubenswrapper[5002]: I1203 18:05:27.711579 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-pb6mc" podStartSLOduration=2.7115565139999998 podStartE2EDuration="2.711556514s" podCreationTimestamp="2025-12-03 18:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:05:27.705159583 +0000 UTC m=+5651.118981481" watchObservedRunningTime="2025-12-03 18:05:27.711556514 +0000 UTC m=+5651.125378402" Dec 03 18:05:29 crc kubenswrapper[5002]: I1203 18:05:29.710177 5002 generic.go:334] "Generic (PLEG): container finished" podID="4bf679aa-747a-461b-9ead-521e718c9bdd" containerID="3403585b659d9fb9dadb1df3e0ec0a35ee461b510368829c065d843322e8d231" exitCode=0 Dec 03 18:05:29 crc kubenswrapper[5002]: I1203 18:05:29.710263 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pb6mc" event={"ID":"4bf679aa-747a-461b-9ead-521e718c9bdd","Type":"ContainerDied","Data":"3403585b659d9fb9dadb1df3e0ec0a35ee461b510368829c065d843322e8d231"} Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.095497 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.260697 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7wx2\" (UniqueName: \"kubernetes.io/projected/4bf679aa-747a-461b-9ead-521e718c9bdd-kube-api-access-k7wx2\") pod \"4bf679aa-747a-461b-9ead-521e718c9bdd\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.261409 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-db-sync-config-data\") pod \"4bf679aa-747a-461b-9ead-521e718c9bdd\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.261685 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-combined-ca-bundle\") pod \"4bf679aa-747a-461b-9ead-521e718c9bdd\" (UID: \"4bf679aa-747a-461b-9ead-521e718c9bdd\") " Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.276727 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bf679aa-747a-461b-9ead-521e718c9bdd-kube-api-access-k7wx2" (OuterVolumeSpecName: "kube-api-access-k7wx2") pod "4bf679aa-747a-461b-9ead-521e718c9bdd" (UID: "4bf679aa-747a-461b-9ead-521e718c9bdd"). InnerVolumeSpecName "kube-api-access-k7wx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.276900 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4bf679aa-747a-461b-9ead-521e718c9bdd" (UID: "4bf679aa-747a-461b-9ead-521e718c9bdd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.289314 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bf679aa-747a-461b-9ead-521e718c9bdd" (UID: "4bf679aa-747a-461b-9ead-521e718c9bdd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.364064 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7wx2\" (UniqueName: \"kubernetes.io/projected/4bf679aa-747a-461b-9ead-521e718c9bdd-kube-api-access-k7wx2\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.364114 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.364128 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf679aa-747a-461b-9ead-521e718c9bdd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.733990 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pb6mc" event={"ID":"4bf679aa-747a-461b-9ead-521e718c9bdd","Type":"ContainerDied","Data":"f1a71f7ecd6c8761afb121f280412fc203f02a5b64f335506d3249f64ef764a2"} Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.734040 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1a71f7ecd6c8761afb121f280412fc203f02a5b64f335506d3249f64ef764a2" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.734384 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pb6mc" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.983760 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6c75fb47bc-c7g9j"] Dec 03 18:05:31 crc kubenswrapper[5002]: E1203 18:05:31.984176 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf679aa-747a-461b-9ead-521e718c9bdd" containerName="barbican-db-sync" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.984193 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf679aa-747a-461b-9ead-521e718c9bdd" containerName="barbican-db-sync" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.984409 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bf679aa-747a-461b-9ead-521e718c9bdd" containerName="barbican-db-sync" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.985381 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.989236 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-j8884" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.989501 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 18:05:31 crc kubenswrapper[5002]: I1203 18:05:31.991091 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.007452 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c75fb47bc-c7g9j"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.034043 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6f4548857b-5c9mn"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.036226 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.040989 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.075262 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f4548857b-5c9mn"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.132847 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d44f4f795-bdtdw"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.134474 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.160875 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d44f4f795-bdtdw"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.187171 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npjgl\" (UniqueName: \"kubernetes.io/projected/aad241d9-99ca-4f3c-b980-ab2f989b754f-kube-api-access-npjgl\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.187251 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-config-data-custom\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.187278 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-combined-ca-bundle\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.187326 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-config-data-custom\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.187993 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-combined-ca-bundle\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.188078 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-config-data\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc 
kubenswrapper[5002]: I1203 18:05:32.188107 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aad241d9-99ca-4f3c-b980-ab2f989b754f-logs\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.188147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-config-data\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.188209 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xv5r\" (UniqueName: \"kubernetes.io/projected/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-kube-api-access-4xv5r\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.188257 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-logs\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.244893 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-84d467b9b8-4lqh2"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.247233 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.249735 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.269844 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-84d467b9b8-4lqh2"] Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.290801 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-dns-svc\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.290890 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-config-data-custom\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.290951 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-combined-ca-bundle\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291027 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-sb\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291057 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-config-data\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291082 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aad241d9-99ca-4f3c-b980-ab2f989b754f-logs\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291157 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-config-data\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291225 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xv5r\" (UniqueName: \"kubernetes.io/projected/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-kube-api-access-4xv5r\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: 
\"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291282 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-config\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291346 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-logs\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.291386 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf7l2\" (UniqueName: \"kubernetes.io/projected/9005c62c-87ce-434f-8781-d337d52a7056-kube-api-access-cf7l2\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.292569 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npjgl\" (UniqueName: \"kubernetes.io/projected/aad241d9-99ca-4f3c-b980-ab2f989b754f-kube-api-access-npjgl\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.292635 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-nb\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.292690 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-config-data-custom\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.292715 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-combined-ca-bundle\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.292870 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-logs\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.293770 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/aad241d9-99ca-4f3c-b980-ab2f989b754f-logs\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.306556 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-config-data-custom\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.308052 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-config-data\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.309961 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-config-data\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.319374 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aad241d9-99ca-4f3c-b980-ab2f989b754f-combined-ca-bundle\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.320363 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-config-data-custom\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.341980 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npjgl\" (UniqueName: \"kubernetes.io/projected/aad241d9-99ca-4f3c-b980-ab2f989b754f-kube-api-access-npjgl\") pod \"barbican-worker-6c75fb47bc-c7g9j\" (UID: \"aad241d9-99ca-4f3c-b980-ab2f989b754f\") " pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.346436 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-combined-ca-bundle\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.364389 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xv5r\" (UniqueName: \"kubernetes.io/projected/f6f2f55a-2e14-44c7-baf4-8db6675cf9f3-kube-api-access-4xv5r\") pod \"barbican-keystone-listener-6f4548857b-5c9mn\" (UID: \"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3\") " pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394032 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-combined-ca-bundle\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394118 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-dns-svc\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394162 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394187 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a123b05c-539e-420b-8a0d-f018036556eb-logs\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394209 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8fgz\" (UniqueName: \"kubernetes.io/projected/a123b05c-539e-420b-8a0d-f018036556eb-kube-api-access-p8fgz\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394241 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-sb\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-config\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394333 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf7l2\" (UniqueName: \"kubernetes.io/projected/9005c62c-87ce-434f-8781-d337d52a7056-kube-api-access-cf7l2\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394369 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data-custom\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.394416 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-nb\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.395386 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-nb\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.396879 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-dns-svc\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.397618 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-sb\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.398290 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-config\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.417377 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf7l2\" (UniqueName: \"kubernetes.io/projected/9005c62c-87ce-434f-8781-d337d52a7056-kube-api-access-cf7l2\") pod \"dnsmasq-dns-d44f4f795-bdtdw\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.459582 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.499001 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-combined-ca-bundle\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.499286 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.499432 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a123b05c-539e-420b-8a0d-f018036556eb-logs\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.499547 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8fgz\" (UniqueName: \"kubernetes.io/projected/a123b05c-539e-420b-8a0d-f018036556eb-kube-api-access-p8fgz\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.499989 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data-custom\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.500153 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a123b05c-539e-420b-8a0d-f018036556eb-logs\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.505076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.505292 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data-custom\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.505705 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-combined-ca-bundle\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc 
kubenswrapper[5002]: I1203 18:05:32.519380 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8fgz\" (UniqueName: \"kubernetes.io/projected/a123b05c-539e-420b-8a0d-f018036556eb-kube-api-access-p8fgz\") pod \"barbican-api-84d467b9b8-4lqh2\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.567186 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.606118 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c75fb47bc-c7g9j" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.657772 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" Dec 03 18:05:32 crc kubenswrapper[5002]: I1203 18:05:32.975570 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d44f4f795-bdtdw"] Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.087911 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f4548857b-5c9mn"] Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.128434 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-84d467b9b8-4lqh2"] Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.262654 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c75fb47bc-c7g9j"] Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.792634 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c75fb47bc-c7g9j" event={"ID":"aad241d9-99ca-4f3c-b980-ab2f989b754f","Type":"ContainerStarted","Data":"70c9da9f37e6dc37a54e35545c15f2ec6d9c8001c5c3cb7613e0bff98aca801e"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.793104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c75fb47bc-c7g9j" event={"ID":"aad241d9-99ca-4f3c-b980-ab2f989b754f","Type":"ContainerStarted","Data":"a97961c45a2923d7529fa5e6dcb6e6ed3bcb45a370c1f071695c4d12903e5d9b"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.800188 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-84d467b9b8-4lqh2" event={"ID":"a123b05c-539e-420b-8a0d-f018036556eb","Type":"ContainerStarted","Data":"1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.801193 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.801410 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-84d467b9b8-4lqh2" event={"ID":"a123b05c-539e-420b-8a0d-f018036556eb","Type":"ContainerStarted","Data":"b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.801549 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-84d467b9b8-4lqh2" event={"ID":"a123b05c-539e-420b-8a0d-f018036556eb","Type":"ContainerStarted","Data":"ac0209a82a68218f438f24f243a1e39389ba18dda924edc310e86a2fa1a75315"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.801708 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.811835 5002 generic.go:334] "Generic (PLEG): container finished" podID="9005c62c-87ce-434f-8781-d337d52a7056" containerID="84bfb0860a1085d617018a5199278886c08e68690e2c0ecfcd4da60ca6e296d7" exitCode=0 Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.814356 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" event={"ID":"9005c62c-87ce-434f-8781-d337d52a7056","Type":"ContainerDied","Data":"84bfb0860a1085d617018a5199278886c08e68690e2c0ecfcd4da60ca6e296d7"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.814418 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" event={"ID":"9005c62c-87ce-434f-8781-d337d52a7056","Type":"ContainerStarted","Data":"7fa5acddc3ae9f09c94de8d51b70a02786aa11504454f02111f3f8ce11cc1e47"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.826694 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" event={"ID":"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3","Type":"ContainerStarted","Data":"2d251ae9a315fac128703924a35fe3cf853f9165b02f218f0f93c6f931dc9bed"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.826774 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" event={"ID":"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3","Type":"ContainerStarted","Data":"b66d62235f6c9cfaf25a399febbdc034e95cebf1d6e57c5e7e8bcacda63a90ea"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.826790 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" event={"ID":"f6f2f55a-2e14-44c7-baf4-8db6675cf9f3","Type":"ContainerStarted","Data":"1c484a3acafefa74e6144b25915b35281258a8a55fe50d40db273390710b4bed"} Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.839480 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-84d467b9b8-4lqh2" podStartSLOduration=1.839454111 podStartE2EDuration="1.839454111s" podCreationTimestamp="2025-12-03 18:05:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:05:33.828678701 +0000 UTC m=+5657.242500609" watchObservedRunningTime="2025-12-03 18:05:33.839454111 +0000 UTC m=+5657.253275999" Dec 03 18:05:33 crc kubenswrapper[5002]: I1203 18:05:33.875167 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6f4548857b-5c9mn" podStartSLOduration=2.875142761 podStartE2EDuration="2.875142761s" podCreationTimestamp="2025-12-03 18:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:05:33.862203603 +0000 UTC m=+5657.276025491" watchObservedRunningTime="2025-12-03 18:05:33.875142761 +0000 UTC m=+5657.288964649" Dec 03 18:05:34 crc kubenswrapper[5002]: I1203 18:05:34.851119 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:34 crc kubenswrapper[5002]: I1203 18:05:34.851670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" 
event={"ID":"9005c62c-87ce-434f-8781-d337d52a7056","Type":"ContainerStarted","Data":"cfe83dc6104816c5f1e42112b085e3946c87900be6e5a3e8200f2aaad3ef180b"} Dec 03 18:05:34 crc kubenswrapper[5002]: I1203 18:05:34.851727 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c75fb47bc-c7g9j" event={"ID":"aad241d9-99ca-4f3c-b980-ab2f989b754f","Type":"ContainerStarted","Data":"82e9b9baa5eae49c06e8d8e60c273c0183df22f0d64876b5f7b6ac3e7cbaee8d"} Dec 03 18:05:34 crc kubenswrapper[5002]: I1203 18:05:34.877611 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" podStartSLOduration=2.877591513 podStartE2EDuration="2.877591513s" podCreationTimestamp="2025-12-03 18:05:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:05:34.868728415 +0000 UTC m=+5658.282550303" watchObservedRunningTime="2025-12-03 18:05:34.877591513 +0000 UTC m=+5658.291413401" Dec 03 18:05:34 crc kubenswrapper[5002]: I1203 18:05:34.898494 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6c75fb47bc-c7g9j" podStartSLOduration=3.898472125 podStartE2EDuration="3.898472125s" podCreationTimestamp="2025-12-03 18:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:05:34.890990284 +0000 UTC m=+5658.304812172" watchObservedRunningTime="2025-12-03 18:05:34.898472125 +0000 UTC m=+5658.312294013" Dec 03 18:05:34 crc kubenswrapper[5002]: I1203 18:05:34.998620 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-65468cf9b-tkjp6"] Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.000191 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.002541 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.002649 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.066994 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff8ecbd1-789f-490c-a96b-4b2b15d50352-logs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.067041 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-public-tls-certs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.067081 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-config-data\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.067126 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-config-data-custom\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.067204 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-internal-tls-certs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.067225 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-combined-ca-bundle\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.067287 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw2w6\" (UniqueName: \"kubernetes.io/projected/ff8ecbd1-789f-490c-a96b-4b2b15d50352-kube-api-access-dw2w6\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.071749 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65468cf9b-tkjp6"] Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.168704 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-dw2w6\" (UniqueName: \"kubernetes.io/projected/ff8ecbd1-789f-490c-a96b-4b2b15d50352-kube-api-access-dw2w6\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169267 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff8ecbd1-789f-490c-a96b-4b2b15d50352-logs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169295 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-public-tls-certs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169337 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-config-data\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169383 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-config-data-custom\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169423 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-internal-tls-certs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169445 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-combined-ca-bundle\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.169753 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff8ecbd1-789f-490c-a96b-4b2b15d50352-logs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.174338 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-combined-ca-bundle\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.174386 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-config-data-custom\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.174855 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-config-data\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.178352 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-internal-tls-certs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.178465 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8ecbd1-789f-490c-a96b-4b2b15d50352-public-tls-certs\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.192486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw2w6\" (UniqueName: \"kubernetes.io/projected/ff8ecbd1-789f-490c-a96b-4b2b15d50352-kube-api-access-dw2w6\") pod \"barbican-api-65468cf9b-tkjp6\" (UID: \"ff8ecbd1-789f-490c-a96b-4b2b15d50352\") " pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.322543 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.775984 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65468cf9b-tkjp6"] Dec 03 18:05:35 crc kubenswrapper[5002]: I1203 18:05:35.857847 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65468cf9b-tkjp6" event={"ID":"ff8ecbd1-789f-490c-a96b-4b2b15d50352","Type":"ContainerStarted","Data":"84fb01f283023c01851ee8c2080946073c9d48d942bab98657fbf02bab2f2855"} Dec 03 18:05:36 crc kubenswrapper[5002]: I1203 18:05:36.873608 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65468cf9b-tkjp6" event={"ID":"ff8ecbd1-789f-490c-a96b-4b2b15d50352","Type":"ContainerStarted","Data":"d9e2f9a9370e11ac6b1d828d9b906f464eec5c8e36c4370120ffef6263a03d0d"} Dec 03 18:05:37 crc kubenswrapper[5002]: I1203 18:05:37.886030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65468cf9b-tkjp6" event={"ID":"ff8ecbd1-789f-490c-a96b-4b2b15d50352","Type":"ContainerStarted","Data":"f412d7afaa9fec22c92912d9d88501c45607fc34b9afb68abae7d0dbce3cefc3"} Dec 03 18:05:37 crc kubenswrapper[5002]: I1203 18:05:37.887524 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:37 crc kubenswrapper[5002]: I1203 18:05:37.887555 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:37 crc kubenswrapper[5002]: I1203 18:05:37.917476 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-65468cf9b-tkjp6" podStartSLOduration=3.917436714 podStartE2EDuration="3.917436714s" podCreationTimestamp="2025-12-03 18:05:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:05:37.911635187 +0000 UTC m=+5661.325457085" watchObservedRunningTime="2025-12-03 18:05:37.917436714 +0000 UTC m=+5661.331258612" Dec 03 18:05:42 crc kubenswrapper[5002]: I1203 18:05:42.461867 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:05:42 crc kubenswrapper[5002]: I1203 18:05:42.518966 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644fc4fd69-lcc74"] Dec 03 18:05:42 crc kubenswrapper[5002]: I1203 18:05:42.519210 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="dnsmasq-dns" containerID="cri-o://ba6d177f1a840691c2d7ae44387ca755a92245b51c87da5fc9b0c1bdd59fe52f" gracePeriod=10 Dec 03 18:05:42 crc kubenswrapper[5002]: I1203 18:05:42.971210 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" event={"ID":"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8","Type":"ContainerDied","Data":"ba6d177f1a840691c2d7ae44387ca755a92245b51c87da5fc9b0c1bdd59fe52f"} Dec 03 18:05:42 crc kubenswrapper[5002]: I1203 18:05:42.971082 5002 generic.go:334] "Generic (PLEG): container finished" podID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerID="ba6d177f1a840691c2d7ae44387ca755a92245b51c87da5fc9b0c1bdd59fe52f" exitCode=0 Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.044620 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.246765 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-nb\") pod \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.247564 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-dns-svc\") pod \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.247612 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-sb\") pod \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.247669 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkczs\" (UniqueName: \"kubernetes.io/projected/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-kube-api-access-rkczs\") pod \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.247771 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-config\") pod \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\" (UID: \"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8\") " Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.263915 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-kube-api-access-rkczs" (OuterVolumeSpecName: "kube-api-access-rkczs") pod "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" (UID: "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8"). InnerVolumeSpecName "kube-api-access-rkczs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.296698 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" (UID: "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.298751 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-config" (OuterVolumeSpecName: "config") pod "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" (UID: "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.306247 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" (UID: "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.308236 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" (UID: "c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.349277 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.349322 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.349336 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.349350 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkczs\" (UniqueName: \"kubernetes.io/projected/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-kube-api-access-rkczs\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.349367 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.980112 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.980068 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" event={"ID":"c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8","Type":"ContainerDied","Data":"841722a43acd5a1623736bd88ecde43e93952fc91e20ad4266fecd9e34005848"} Dec 03 18:05:43 crc kubenswrapper[5002]: I1203 18:05:43.980253 5002 scope.go:117] "RemoveContainer" containerID="ba6d177f1a840691c2d7ae44387ca755a92245b51c87da5fc9b0c1bdd59fe52f" Dec 03 18:05:44 crc kubenswrapper[5002]: I1203 18:05:44.006180 5002 scope.go:117] "RemoveContainer" containerID="42f3bae3906a2e05de44868b8469842343d86a83baf89556161dd6b5974cdd5f" Dec 03 18:05:44 crc kubenswrapper[5002]: I1203 18:05:44.028456 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644fc4fd69-lcc74"] Dec 03 18:05:44 crc kubenswrapper[5002]: I1203 18:05:44.041095 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-644fc4fd69-lcc74"] Dec 03 18:05:44 crc kubenswrapper[5002]: I1203 18:05:44.211581 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:44 crc kubenswrapper[5002]: I1203 18:05:44.360507 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:44 crc kubenswrapper[5002]: I1203 18:05:44.852235 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" path="/var/lib/kubelet/pods/c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8/volumes" Dec 03 18:05:46 crc kubenswrapper[5002]: I1203 18:05:46.804254 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:46 crc kubenswrapper[5002]: I1203 18:05:46.968447 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65468cf9b-tkjp6" Dec 03 18:05:47 crc kubenswrapper[5002]: I1203 18:05:47.032700 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-84d467b9b8-4lqh2"] Dec 03 18:05:47 crc kubenswrapper[5002]: I1203 18:05:47.033122 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-84d467b9b8-4lqh2" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api-log" containerID="cri-o://b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c" gracePeriod=30 Dec 03 18:05:47 crc kubenswrapper[5002]: I1203 18:05:47.033233 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-84d467b9b8-4lqh2" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api" containerID="cri-o://1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274" gracePeriod=30 Dec 03 18:05:47 crc kubenswrapper[5002]: I1203 18:05:47.939178 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-644fc4fd69-lcc74" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.19:5353: i/o timeout" Dec 03 18:05:48 crc kubenswrapper[5002]: I1203 18:05:48.017642 5002 generic.go:334] "Generic (PLEG): container finished" podID="a123b05c-539e-420b-8a0d-f018036556eb" containerID="b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c" exitCode=143 Dec 03 18:05:48 crc kubenswrapper[5002]: I1203 
18:05:48.017722 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-84d467b9b8-4lqh2" event={"ID":"a123b05c-539e-420b-8a0d-f018036556eb","Type":"ContainerDied","Data":"b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c"} Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.202729 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-84d467b9b8-4lqh2" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.30:9311/healthcheck\": read tcp 10.217.0.2:47530->10.217.1.30:9311: read: connection reset by peer" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.203723 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-84d467b9b8-4lqh2" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.30:9311/healthcheck\": read tcp 10.217.0.2:47534->10.217.1.30:9311: read: connection reset by peer" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.629294 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.786337 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8fgz\" (UniqueName: \"kubernetes.io/projected/a123b05c-539e-420b-8a0d-f018036556eb-kube-api-access-p8fgz\") pod \"a123b05c-539e-420b-8a0d-f018036556eb\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.786397 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a123b05c-539e-420b-8a0d-f018036556eb-logs\") pod \"a123b05c-539e-420b-8a0d-f018036556eb\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.786442 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data\") pod \"a123b05c-539e-420b-8a0d-f018036556eb\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.786478 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-combined-ca-bundle\") pod \"a123b05c-539e-420b-8a0d-f018036556eb\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.786512 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data-custom\") pod \"a123b05c-539e-420b-8a0d-f018036556eb\" (UID: \"a123b05c-539e-420b-8a0d-f018036556eb\") " Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.787942 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a123b05c-539e-420b-8a0d-f018036556eb-logs" (OuterVolumeSpecName: "logs") pod "a123b05c-539e-420b-8a0d-f018036556eb" (UID: "a123b05c-539e-420b-8a0d-f018036556eb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.792181 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a123b05c-539e-420b-8a0d-f018036556eb" (UID: "a123b05c-539e-420b-8a0d-f018036556eb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.792828 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a123b05c-539e-420b-8a0d-f018036556eb-kube-api-access-p8fgz" (OuterVolumeSpecName: "kube-api-access-p8fgz") pod "a123b05c-539e-420b-8a0d-f018036556eb" (UID: "a123b05c-539e-420b-8a0d-f018036556eb"). InnerVolumeSpecName "kube-api-access-p8fgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.811909 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a123b05c-539e-420b-8a0d-f018036556eb" (UID: "a123b05c-539e-420b-8a0d-f018036556eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.835248 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data" (OuterVolumeSpecName: "config-data") pod "a123b05c-539e-420b-8a0d-f018036556eb" (UID: "a123b05c-539e-420b-8a0d-f018036556eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.888604 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8fgz\" (UniqueName: \"kubernetes.io/projected/a123b05c-539e-420b-8a0d-f018036556eb-kube-api-access-p8fgz\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.888693 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a123b05c-539e-420b-8a0d-f018036556eb-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.888929 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.888982 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:50 crc kubenswrapper[5002]: I1203 18:05:50.888995 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a123b05c-539e-420b-8a0d-f018036556eb-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.042004 5002 generic.go:334] "Generic (PLEG): container finished" podID="a123b05c-539e-420b-8a0d-f018036556eb" containerID="1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274" exitCode=0 Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.042046 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-84d467b9b8-4lqh2" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.042051 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-84d467b9b8-4lqh2" event={"ID":"a123b05c-539e-420b-8a0d-f018036556eb","Type":"ContainerDied","Data":"1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274"} Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.042177 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-84d467b9b8-4lqh2" event={"ID":"a123b05c-539e-420b-8a0d-f018036556eb","Type":"ContainerDied","Data":"ac0209a82a68218f438f24f243a1e39389ba18dda924edc310e86a2fa1a75315"} Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.042198 5002 scope.go:117] "RemoveContainer" containerID="1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.065030 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-84d467b9b8-4lqh2"] Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.066368 5002 scope.go:117] "RemoveContainer" containerID="b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.071360 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-84d467b9b8-4lqh2"] Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.080648 5002 scope.go:117] "RemoveContainer" containerID="1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274" Dec 03 18:05:51 crc kubenswrapper[5002]: E1203 18:05:51.081085 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274\": container with ID starting with 1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274 not found: ID does not exist" containerID="1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.081114 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274"} err="failed to get container status \"1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274\": rpc error: code = NotFound desc = could not find container \"1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274\": container with ID starting with 1b9806f5d298ac227a76ecbbcc94da0621f05c00722dc2eecc6dd76695627274 not found: ID does not exist" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.081135 5002 scope.go:117] "RemoveContainer" containerID="b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c" Dec 03 18:05:51 crc kubenswrapper[5002]: E1203 18:05:51.081392 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c\": container with ID starting with b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c not found: ID does not exist" containerID="b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c" Dec 03 18:05:51 crc kubenswrapper[5002]: I1203 18:05:51.081414 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c"} err="failed to get container status 
\"b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c\": rpc error: code = NotFound desc = could not find container \"b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c\": container with ID starting with b0abf955334d735e4850d12bf1e62414ec1a6c0fb99c65ed92f24c68080bb15c not found: ID does not exist" Dec 03 18:05:52 crc kubenswrapper[5002]: I1203 18:05:52.854308 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a123b05c-539e-420b-8a0d-f018036556eb" path="/var/lib/kubelet/pods/a123b05c-539e-420b-8a0d-f018036556eb/volumes" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.220165 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-lskdt"] Dec 03 18:05:54 crc kubenswrapper[5002]: E1203 18:05:54.220883 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.220898 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api" Dec 03 18:05:54 crc kubenswrapper[5002]: E1203 18:05:54.220917 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="dnsmasq-dns" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.220923 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="dnsmasq-dns" Dec 03 18:05:54 crc kubenswrapper[5002]: E1203 18:05:54.220948 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api-log" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.220957 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api-log" Dec 03 18:05:54 crc kubenswrapper[5002]: E1203 18:05:54.220982 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="init" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.220988 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="init" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.221179 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c9418f-66dc-4f0d-bf8b-1ed89b29dcd8" containerName="dnsmasq-dns" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.221209 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.221224 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a123b05c-539e-420b-8a0d-f018036556eb" containerName="barbican-api-log" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.221922 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.245711 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-operator-scripts\") pod \"neutron-db-create-lskdt\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.245880 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ncdk\" (UniqueName: \"kubernetes.io/projected/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-kube-api-access-6ncdk\") pod \"neutron-db-create-lskdt\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.266972 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-lskdt"] Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.322599 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-818a-account-create-update-gdv5h"] Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.323949 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.327472 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.335147 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-818a-account-create-update-gdv5h"] Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.347312 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec3180de-e934-4cc6-8657-b30a589eebd4-operator-scripts\") pod \"neutron-818a-account-create-update-gdv5h\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.347678 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r7k4\" (UniqueName: \"kubernetes.io/projected/ec3180de-e934-4cc6-8657-b30a589eebd4-kube-api-access-6r7k4\") pod \"neutron-818a-account-create-update-gdv5h\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.347809 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-operator-scripts\") pod \"neutron-db-create-lskdt\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.347970 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ncdk\" (UniqueName: \"kubernetes.io/projected/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-kube-api-access-6ncdk\") pod \"neutron-db-create-lskdt\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.354986 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-operator-scripts\") pod \"neutron-db-create-lskdt\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.368631 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ncdk\" (UniqueName: \"kubernetes.io/projected/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-kube-api-access-6ncdk\") pod \"neutron-db-create-lskdt\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.450223 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec3180de-e934-4cc6-8657-b30a589eebd4-operator-scripts\") pod \"neutron-818a-account-create-update-gdv5h\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.450376 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r7k4\" (UniqueName: \"kubernetes.io/projected/ec3180de-e934-4cc6-8657-b30a589eebd4-kube-api-access-6r7k4\") pod \"neutron-818a-account-create-update-gdv5h\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.451133 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec3180de-e934-4cc6-8657-b30a589eebd4-operator-scripts\") pod \"neutron-818a-account-create-update-gdv5h\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.471423 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r7k4\" (UniqueName: \"kubernetes.io/projected/ec3180de-e934-4cc6-8657-b30a589eebd4-kube-api-access-6r7k4\") pod \"neutron-818a-account-create-update-gdv5h\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.551994 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:54 crc kubenswrapper[5002]: I1203 18:05:54.640520 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:55 crc kubenswrapper[5002]: W1203 18:05:55.067037 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ef4446f_672a_465a_a1a6_e51cc9f5e5f3.slice/crio-babad8bae1a0b0197afbc74878583dc8ee6819d6ca6bb0d6c06012d08d32bff5 WatchSource:0}: Error finding container babad8bae1a0b0197afbc74878583dc8ee6819d6ca6bb0d6c06012d08d32bff5: Status 404 returned error can't find the container with id babad8bae1a0b0197afbc74878583dc8ee6819d6ca6bb0d6c06012d08d32bff5 Dec 03 18:05:55 crc kubenswrapper[5002]: I1203 18:05:55.067576 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-lskdt"] Dec 03 18:05:55 crc kubenswrapper[5002]: I1203 18:05:55.128120 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-818a-account-create-update-gdv5h"] Dec 03 18:05:55 crc kubenswrapper[5002]: W1203 18:05:55.136573 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec3180de_e934_4cc6_8657_b30a589eebd4.slice/crio-74b7334688425df8328d10df4af71e7085da090105d85d3bef63125e9ae45145 WatchSource:0}: Error finding container 74b7334688425df8328d10df4af71e7085da090105d85d3bef63125e9ae45145: Status 404 returned error can't find the container with id 74b7334688425df8328d10df4af71e7085da090105d85d3bef63125e9ae45145 Dec 03 18:05:56 crc kubenswrapper[5002]: I1203 18:05:56.084520 5002 generic.go:334] "Generic (PLEG): container finished" podID="ec3180de-e934-4cc6-8657-b30a589eebd4" containerID="d97f51523fe99c96754f5e9c8d39bca99d8fa2947865bb07d38a5b49beed166c" exitCode=0 Dec 03 18:05:56 crc kubenswrapper[5002]: I1203 18:05:56.084629 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-818a-account-create-update-gdv5h" event={"ID":"ec3180de-e934-4cc6-8657-b30a589eebd4","Type":"ContainerDied","Data":"d97f51523fe99c96754f5e9c8d39bca99d8fa2947865bb07d38a5b49beed166c"} Dec 03 18:05:56 crc kubenswrapper[5002]: I1203 18:05:56.084973 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-818a-account-create-update-gdv5h" event={"ID":"ec3180de-e934-4cc6-8657-b30a589eebd4","Type":"ContainerStarted","Data":"74b7334688425df8328d10df4af71e7085da090105d85d3bef63125e9ae45145"} Dec 03 18:05:56 crc kubenswrapper[5002]: I1203 18:05:56.087047 5002 generic.go:334] "Generic (PLEG): container finished" podID="6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" containerID="21653a3fcbc03467e833df3749b6015213e54c3458acb540a26f717411ac6b5c" exitCode=0 Dec 03 18:05:56 crc kubenswrapper[5002]: I1203 18:05:56.087102 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lskdt" event={"ID":"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3","Type":"ContainerDied","Data":"21653a3fcbc03467e833df3749b6015213e54c3458acb540a26f717411ac6b5c"} Dec 03 18:05:56 crc kubenswrapper[5002]: I1203 18:05:56.087132 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lskdt" event={"ID":"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3","Type":"ContainerStarted","Data":"babad8bae1a0b0197afbc74878583dc8ee6819d6ca6bb0d6c06012d08d32bff5"} Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.478148 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.486781 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.599856 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec3180de-e934-4cc6-8657-b30a589eebd4-operator-scripts\") pod \"ec3180de-e934-4cc6-8657-b30a589eebd4\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.599999 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-operator-scripts\") pod \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.600094 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r7k4\" (UniqueName: \"kubernetes.io/projected/ec3180de-e934-4cc6-8657-b30a589eebd4-kube-api-access-6r7k4\") pod \"ec3180de-e934-4cc6-8657-b30a589eebd4\" (UID: \"ec3180de-e934-4cc6-8657-b30a589eebd4\") " Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.600162 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ncdk\" (UniqueName: \"kubernetes.io/projected/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-kube-api-access-6ncdk\") pod \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\" (UID: \"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3\") " Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.600968 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec3180de-e934-4cc6-8657-b30a589eebd4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ec3180de-e934-4cc6-8657-b30a589eebd4" (UID: "ec3180de-e934-4cc6-8657-b30a589eebd4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.601331 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" (UID: "6ef4446f-672a-465a-a1a6-e51cc9f5e5f3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.606949 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec3180de-e934-4cc6-8657-b30a589eebd4-kube-api-access-6r7k4" (OuterVolumeSpecName: "kube-api-access-6r7k4") pod "ec3180de-e934-4cc6-8657-b30a589eebd4" (UID: "ec3180de-e934-4cc6-8657-b30a589eebd4"). InnerVolumeSpecName "kube-api-access-6r7k4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.618130 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-kube-api-access-6ncdk" (OuterVolumeSpecName: "kube-api-access-6ncdk") pod "6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" (UID: "6ef4446f-672a-465a-a1a6-e51cc9f5e5f3"). InnerVolumeSpecName "kube-api-access-6ncdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.702593 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r7k4\" (UniqueName: \"kubernetes.io/projected/ec3180de-e934-4cc6-8657-b30a589eebd4-kube-api-access-6r7k4\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.702647 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ncdk\" (UniqueName: \"kubernetes.io/projected/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-kube-api-access-6ncdk\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.702663 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec3180de-e934-4cc6-8657-b30a589eebd4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:57 crc kubenswrapper[5002]: I1203 18:05:57.702676 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:05:58 crc kubenswrapper[5002]: I1203 18:05:58.105372 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lskdt" event={"ID":"6ef4446f-672a-465a-a1a6-e51cc9f5e5f3","Type":"ContainerDied","Data":"babad8bae1a0b0197afbc74878583dc8ee6819d6ca6bb0d6c06012d08d32bff5"} Dec 03 18:05:58 crc kubenswrapper[5002]: I1203 18:05:58.105732 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="babad8bae1a0b0197afbc74878583dc8ee6819d6ca6bb0d6c06012d08d32bff5" Dec 03 18:05:58 crc kubenswrapper[5002]: I1203 18:05:58.105397 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lskdt" Dec 03 18:05:58 crc kubenswrapper[5002]: I1203 18:05:58.107051 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-818a-account-create-update-gdv5h" event={"ID":"ec3180de-e934-4cc6-8657-b30a589eebd4","Type":"ContainerDied","Data":"74b7334688425df8328d10df4af71e7085da090105d85d3bef63125e9ae45145"} Dec 03 18:05:58 crc kubenswrapper[5002]: I1203 18:05:58.107078 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74b7334688425df8328d10df4af71e7085da090105d85d3bef63125e9ae45145" Dec 03 18:05:58 crc kubenswrapper[5002]: I1203 18:05:58.107203 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-818a-account-create-update-gdv5h" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.601651 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-gnrcz"] Dec 03 18:05:59 crc kubenswrapper[5002]: E1203 18:05:59.602495 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" containerName="mariadb-database-create" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.602510 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" containerName="mariadb-database-create" Dec 03 18:05:59 crc kubenswrapper[5002]: E1203 18:05:59.602543 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec3180de-e934-4cc6-8657-b30a589eebd4" containerName="mariadb-account-create-update" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.602549 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec3180de-e934-4cc6-8657-b30a589eebd4" containerName="mariadb-account-create-update" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.602764 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" containerName="mariadb-database-create" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.602791 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec3180de-e934-4cc6-8657-b30a589eebd4" containerName="mariadb-account-create-update" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.603422 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.609859 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-9jsw9" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.610036 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.610230 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.637809 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gnrcz"] Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.735729 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-combined-ca-bundle\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.736278 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-config\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.736457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g887\" (UniqueName: \"kubernetes.io/projected/13bf5cea-5dcd-4a75-a88b-30215345f16f-kube-api-access-2g887\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc 
kubenswrapper[5002]: I1203 18:05:59.838730 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-combined-ca-bundle\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.839059 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-config\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.840025 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g887\" (UniqueName: \"kubernetes.io/projected/13bf5cea-5dcd-4a75-a88b-30215345f16f-kube-api-access-2g887\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.846234 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-combined-ca-bundle\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.860868 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-config\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.877820 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g887\" (UniqueName: \"kubernetes.io/projected/13bf5cea-5dcd-4a75-a88b-30215345f16f-kube-api-access-2g887\") pod \"neutron-db-sync-gnrcz\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:05:59 crc kubenswrapper[5002]: I1203 18:05:59.931431 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:06:00 crc kubenswrapper[5002]: I1203 18:06:00.405654 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gnrcz"] Dec 03 18:06:01 crc kubenswrapper[5002]: I1203 18:06:01.131789 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gnrcz" event={"ID":"13bf5cea-5dcd-4a75-a88b-30215345f16f","Type":"ContainerStarted","Data":"628c3d83bc8b8a64686a81c906b1113136cfbeea92a51f9e878ae4065f87d7f3"} Dec 03 18:06:01 crc kubenswrapper[5002]: I1203 18:06:01.132131 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gnrcz" event={"ID":"13bf5cea-5dcd-4a75-a88b-30215345f16f","Type":"ContainerStarted","Data":"ed4c9047d1d89b7391b7fb21e4b16ea40024935d0db0cc988235715933410362"} Dec 03 18:06:01 crc kubenswrapper[5002]: I1203 18:06:01.163718 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-gnrcz" podStartSLOduration=2.163693807 podStartE2EDuration="2.163693807s" podCreationTimestamp="2025-12-03 18:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:06:01.153830032 +0000 UTC m=+5684.567651920" watchObservedRunningTime="2025-12-03 18:06:01.163693807 +0000 UTC m=+5684.577515705" Dec 03 18:06:05 crc kubenswrapper[5002]: I1203 18:06:05.165297 5002 generic.go:334] "Generic (PLEG): container finished" podID="13bf5cea-5dcd-4a75-a88b-30215345f16f" containerID="628c3d83bc8b8a64686a81c906b1113136cfbeea92a51f9e878ae4065f87d7f3" exitCode=0 Dec 03 18:06:05 crc kubenswrapper[5002]: I1203 18:06:05.165393 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gnrcz" event={"ID":"13bf5cea-5dcd-4a75-a88b-30215345f16f","Type":"ContainerDied","Data":"628c3d83bc8b8a64686a81c906b1113136cfbeea92a51f9e878ae4065f87d7f3"} Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.493061 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.674772 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-config\") pod \"13bf5cea-5dcd-4a75-a88b-30215345f16f\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.674862 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-combined-ca-bundle\") pod \"13bf5cea-5dcd-4a75-a88b-30215345f16f\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.674900 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g887\" (UniqueName: \"kubernetes.io/projected/13bf5cea-5dcd-4a75-a88b-30215345f16f-kube-api-access-2g887\") pod \"13bf5cea-5dcd-4a75-a88b-30215345f16f\" (UID: \"13bf5cea-5dcd-4a75-a88b-30215345f16f\") " Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.680572 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13bf5cea-5dcd-4a75-a88b-30215345f16f-kube-api-access-2g887" (OuterVolumeSpecName: "kube-api-access-2g887") pod "13bf5cea-5dcd-4a75-a88b-30215345f16f" (UID: "13bf5cea-5dcd-4a75-a88b-30215345f16f"). InnerVolumeSpecName "kube-api-access-2g887". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.699025 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-config" (OuterVolumeSpecName: "config") pod "13bf5cea-5dcd-4a75-a88b-30215345f16f" (UID: "13bf5cea-5dcd-4a75-a88b-30215345f16f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.726013 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13bf5cea-5dcd-4a75-a88b-30215345f16f" (UID: "13bf5cea-5dcd-4a75-a88b-30215345f16f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.777177 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.777215 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13bf5cea-5dcd-4a75-a88b-30215345f16f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:06 crc kubenswrapper[5002]: I1203 18:06:06.777228 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g887\" (UniqueName: \"kubernetes.io/projected/13bf5cea-5dcd-4a75-a88b-30215345f16f-kube-api-access-2g887\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.187000 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gnrcz" event={"ID":"13bf5cea-5dcd-4a75-a88b-30215345f16f","Type":"ContainerDied","Data":"ed4c9047d1d89b7391b7fb21e4b16ea40024935d0db0cc988235715933410362"} Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.187048 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed4c9047d1d89b7391b7fb21e4b16ea40024935d0db0cc988235715933410362" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.187065 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gnrcz" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.437381 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77598f7887-6vfxd"] Dec 03 18:06:07 crc kubenswrapper[5002]: E1203 18:06:07.438052 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13bf5cea-5dcd-4a75-a88b-30215345f16f" containerName="neutron-db-sync" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.438067 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="13bf5cea-5dcd-4a75-a88b-30215345f16f" containerName="neutron-db-sync" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.438227 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="13bf5cea-5dcd-4a75-a88b-30215345f16f" containerName="neutron-db-sync" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.439104 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.453710 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77598f7887-6vfxd"] Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.592143 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-nb\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.592250 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-dns-svc\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.592332 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj5ln\" (UniqueName: \"kubernetes.io/projected/c5edca1a-5243-45a7-8700-19a54a2cd1b2-kube-api-access-hj5ln\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.592359 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.592393 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-sb\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.675475 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7d79b97dc6-pfr8r"] Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.677164 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.681355 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.681470 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-9jsw9" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.682384 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.693603 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj5ln\" (UniqueName: \"kubernetes.io/projected/c5edca1a-5243-45a7-8700-19a54a2cd1b2-kube-api-access-hj5ln\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.693662 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.693715 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-sb\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.693811 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-nb\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.693900 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-dns-svc\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.694501 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.694728 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-sb\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.694757 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.694904 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-nb\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.694904 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-dns-svc\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.696987 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7d79b97dc6-pfr8r"] Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.725081 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj5ln\" (UniqueName: \"kubernetes.io/projected/c5edca1a-5243-45a7-8700-19a54a2cd1b2-kube-api-access-hj5ln\") pod \"dnsmasq-dns-77598f7887-6vfxd\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.754872 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.796235 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7fl9\" (UniqueName: \"kubernetes.io/projected/79deab8e-6745-4271-ab22-65f25550e578-kube-api-access-g7fl9\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.796309 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-config\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.796361 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-httpd-config\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.796402 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-combined-ca-bundle\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.796434 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-ovndb-tls-certs\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.897737 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7fl9\" (UniqueName: 
\"kubernetes.io/projected/79deab8e-6745-4271-ab22-65f25550e578-kube-api-access-g7fl9\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.898135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-config\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.898184 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-httpd-config\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.898215 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-combined-ca-bundle\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.898243 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-ovndb-tls-certs\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.902691 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-ovndb-tls-certs\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.904572 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-combined-ca-bundle\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.906387 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-httpd-config\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.912418 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-config\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.928463 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7fl9\" (UniqueName: \"kubernetes.io/projected/79deab8e-6745-4271-ab22-65f25550e578-kube-api-access-g7fl9\") pod \"neutron-7d79b97dc6-pfr8r\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " 
pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:07 crc kubenswrapper[5002]: I1203 18:06:07.992034 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:08 crc kubenswrapper[5002]: I1203 18:06:08.313847 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77598f7887-6vfxd"] Dec 03 18:06:08 crc kubenswrapper[5002]: I1203 18:06:08.623124 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7d79b97dc6-pfr8r"] Dec 03 18:06:08 crc kubenswrapper[5002]: E1203 18:06:08.897516 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5edca1a_5243_45a7_8700_19a54a2cd1b2.slice/crio-e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23.scope\": RecentStats: unable to find data in memory cache]" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.207612 5002 generic.go:334] "Generic (PLEG): container finished" podID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerID="e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23" exitCode=0 Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.207673 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" event={"ID":"c5edca1a-5243-45a7-8700-19a54a2cd1b2","Type":"ContainerDied","Data":"e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23"} Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.207699 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" event={"ID":"c5edca1a-5243-45a7-8700-19a54a2cd1b2","Type":"ContainerStarted","Data":"17cedb1eea1eaa398f10939c4d390201adf2d9e5d57634a147ededfa79ef5b75"} Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.216735 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d79b97dc6-pfr8r" event={"ID":"79deab8e-6745-4271-ab22-65f25550e578","Type":"ContainerStarted","Data":"44a056753480c7af5318199e4211fa23fb4c508ca5ce2953622d9a7de8a9886d"} Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.217197 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d79b97dc6-pfr8r" event={"ID":"79deab8e-6745-4271-ab22-65f25550e578","Type":"ContainerStarted","Data":"b3b4ea05ec9d83831db3f16ffc1cd6e9b9e582c5e435dadfdcf0a2cd4ad7b300"} Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.217215 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d79b97dc6-pfr8r" event={"ID":"79deab8e-6745-4271-ab22-65f25550e578","Type":"ContainerStarted","Data":"a715d953978efdd14fe03bc3a226a25dd9a3c7c789ca33b523cb373ef21a8d60"} Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.217245 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.261032 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7d79b97dc6-pfr8r" podStartSLOduration=2.261008573 podStartE2EDuration="2.261008573s" podCreationTimestamp="2025-12-03 18:06:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:06:09.259092972 +0000 UTC m=+5692.672914860" watchObservedRunningTime="2025-12-03 18:06:09.261008573 +0000 UTC m=+5692.674830461" Dec 03 18:06:09 crc 
kubenswrapper[5002]: I1203 18:06:09.854490 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9f76d4b69-zqwxj"] Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.855938 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.858137 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.858739 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.877322 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9f76d4b69-zqwxj"] Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967315 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86qhj\" (UniqueName: \"kubernetes.io/projected/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-kube-api-access-86qhj\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967675 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-config\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967709 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-combined-ca-bundle\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967735 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-ovndb-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967793 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-httpd-config\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967856 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-internal-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:09 crc kubenswrapper[5002]: I1203 18:06:09.967921 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-public-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") 
" pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069353 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86qhj\" (UniqueName: \"kubernetes.io/projected/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-kube-api-access-86qhj\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069414 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-config\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069438 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-combined-ca-bundle\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069459 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-ovndb-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069495 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-httpd-config\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069540 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-internal-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.069592 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-public-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.074840 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-public-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.079710 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-internal-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.079795 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-httpd-config\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.080493 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-combined-ca-bundle\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.080487 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-ovndb-tls-certs\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.080538 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-config\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.098119 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86qhj\" (UniqueName: \"kubernetes.io/projected/0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758-kube-api-access-86qhj\") pod \"neutron-9f76d4b69-zqwxj\" (UID: \"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758\") " pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.187782 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.239389 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" event={"ID":"c5edca1a-5243-45a7-8700-19a54a2cd1b2","Type":"ContainerStarted","Data":"ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009"} Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.239474 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.742534 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" podStartSLOduration=3.742513916 podStartE2EDuration="3.742513916s" podCreationTimestamp="2025-12-03 18:06:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:06:10.259104038 +0000 UTC m=+5693.672925946" watchObservedRunningTime="2025-12-03 18:06:10.742513916 +0000 UTC m=+5694.156335824" Dec 03 18:06:10 crc kubenswrapper[5002]: I1203 18:06:10.746267 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9f76d4b69-zqwxj"] Dec 03 18:06:11 crc kubenswrapper[5002]: I1203 18:06:11.250630 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9f76d4b69-zqwxj" event={"ID":"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758","Type":"ContainerStarted","Data":"6fcc2d86dfc07f8a6d25f5bb49f0659ca611fc1db90c7f5a2d0926cba26b3873"} Dec 03 18:06:11 crc kubenswrapper[5002]: I1203 18:06:11.250981 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9f76d4b69-zqwxj" event={"ID":"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758","Type":"ContainerStarted","Data":"eba72ba5a26b2454de4cbd127c3f1a4dca9284894bf07573b86a36af402b38ec"} Dec 03 18:06:12 crc kubenswrapper[5002]: I1203 18:06:12.262445 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9f76d4b69-zqwxj" event={"ID":"0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758","Type":"ContainerStarted","Data":"8566f549878f7fd7cd19da1c543602ec9552fc677f1f55cabffe6f251dcf9c81"} Dec 03 18:06:12 crc kubenswrapper[5002]: I1203 18:06:12.262951 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:12 crc kubenswrapper[5002]: I1203 18:06:12.300019 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-9f76d4b69-zqwxj" podStartSLOduration=3.299994321 podStartE2EDuration="3.299994321s" podCreationTimestamp="2025-12-03 18:06:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:06:12.284617397 +0000 UTC m=+5695.698439285" watchObservedRunningTime="2025-12-03 18:06:12.299994321 +0000 UTC m=+5695.713816209" Dec 03 18:06:17 crc kubenswrapper[5002]: I1203 18:06:17.757910 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:06:17 crc kubenswrapper[5002]: I1203 18:06:17.805703 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d44f4f795-bdtdw"] Dec 03 18:06:17 crc kubenswrapper[5002]: I1203 18:06:17.805986 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" podUID="9005c62c-87ce-434f-8781-d337d52a7056" containerName="dnsmasq-dns" 
containerID="cri-o://cfe83dc6104816c5f1e42112b085e3946c87900be6e5a3e8200f2aaad3ef180b" gracePeriod=10 Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.315562 5002 generic.go:334] "Generic (PLEG): container finished" podID="9005c62c-87ce-434f-8781-d337d52a7056" containerID="cfe83dc6104816c5f1e42112b085e3946c87900be6e5a3e8200f2aaad3ef180b" exitCode=0 Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.315676 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" event={"ID":"9005c62c-87ce-434f-8781-d337d52a7056","Type":"ContainerDied","Data":"cfe83dc6104816c5f1e42112b085e3946c87900be6e5a3e8200f2aaad3ef180b"} Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.698545 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.738392 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-dns-svc\") pod \"9005c62c-87ce-434f-8781-d337d52a7056\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.738526 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-nb\") pod \"9005c62c-87ce-434f-8781-d337d52a7056\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.738579 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-sb\") pod \"9005c62c-87ce-434f-8781-d337d52a7056\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.738646 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-config\") pod \"9005c62c-87ce-434f-8781-d337d52a7056\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.738690 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf7l2\" (UniqueName: \"kubernetes.io/projected/9005c62c-87ce-434f-8781-d337d52a7056-kube-api-access-cf7l2\") pod \"9005c62c-87ce-434f-8781-d337d52a7056\" (UID: \"9005c62c-87ce-434f-8781-d337d52a7056\") " Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.753085 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9005c62c-87ce-434f-8781-d337d52a7056-kube-api-access-cf7l2" (OuterVolumeSpecName: "kube-api-access-cf7l2") pod "9005c62c-87ce-434f-8781-d337d52a7056" (UID: "9005c62c-87ce-434f-8781-d337d52a7056"). InnerVolumeSpecName "kube-api-access-cf7l2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.791491 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9005c62c-87ce-434f-8781-d337d52a7056" (UID: "9005c62c-87ce-434f-8781-d337d52a7056"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.798139 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9005c62c-87ce-434f-8781-d337d52a7056" (UID: "9005c62c-87ce-434f-8781-d337d52a7056"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.800634 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-config" (OuterVolumeSpecName: "config") pod "9005c62c-87ce-434f-8781-d337d52a7056" (UID: "9005c62c-87ce-434f-8781-d337d52a7056"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.813330 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9005c62c-87ce-434f-8781-d337d52a7056" (UID: "9005c62c-87ce-434f-8781-d337d52a7056"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.840488 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.840521 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf7l2\" (UniqueName: \"kubernetes.io/projected/9005c62c-87ce-434f-8781-d337d52a7056-kube-api-access-cf7l2\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.840534 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.840543 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:18 crc kubenswrapper[5002]: I1203 18:06:18.840552 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9005c62c-87ce-434f-8781-d337d52a7056-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:19 crc kubenswrapper[5002]: I1203 18:06:19.328656 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" event={"ID":"9005c62c-87ce-434f-8781-d337d52a7056","Type":"ContainerDied","Data":"7fa5acddc3ae9f09c94de8d51b70a02786aa11504454f02111f3f8ce11cc1e47"} Dec 03 18:06:19 crc kubenswrapper[5002]: I1203 18:06:19.328708 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d44f4f795-bdtdw" Dec 03 18:06:19 crc kubenswrapper[5002]: I1203 18:06:19.329014 5002 scope.go:117] "RemoveContainer" containerID="cfe83dc6104816c5f1e42112b085e3946c87900be6e5a3e8200f2aaad3ef180b" Dec 03 18:06:19 crc kubenswrapper[5002]: I1203 18:06:19.354651 5002 scope.go:117] "RemoveContainer" containerID="84bfb0860a1085d617018a5199278886c08e68690e2c0ecfcd4da60ca6e296d7" Dec 03 18:06:19 crc kubenswrapper[5002]: I1203 18:06:19.357546 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d44f4f795-bdtdw"] Dec 03 18:06:19 crc kubenswrapper[5002]: I1203 18:06:19.365686 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d44f4f795-bdtdw"] Dec 03 18:06:20 crc kubenswrapper[5002]: I1203 18:06:20.852575 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9005c62c-87ce-434f-8781-d337d52a7056" path="/var/lib/kubelet/pods/9005c62c-87ce-434f-8781-d337d52a7056/volumes" Dec 03 18:06:38 crc kubenswrapper[5002]: I1203 18:06:38.000845 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:40 crc kubenswrapper[5002]: I1203 18:06:40.204543 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-9f76d4b69-zqwxj" Dec 03 18:06:40 crc kubenswrapper[5002]: I1203 18:06:40.272537 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7d79b97dc6-pfr8r"] Dec 03 18:06:40 crc kubenswrapper[5002]: I1203 18:06:40.273378 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7d79b97dc6-pfr8r" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-api" containerID="cri-o://b3b4ea05ec9d83831db3f16ffc1cd6e9b9e582c5e435dadfdcf0a2cd4ad7b300" gracePeriod=30 Dec 03 18:06:40 crc kubenswrapper[5002]: I1203 18:06:40.273466 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7d79b97dc6-pfr8r" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-httpd" containerID="cri-o://44a056753480c7af5318199e4211fa23fb4c508ca5ce2953622d9a7de8a9886d" gracePeriod=30 Dec 03 18:06:40 crc kubenswrapper[5002]: I1203 18:06:40.515899 5002 generic.go:334] "Generic (PLEG): container finished" podID="79deab8e-6745-4271-ab22-65f25550e578" containerID="44a056753480c7af5318199e4211fa23fb4c508ca5ce2953622d9a7de8a9886d" exitCode=0 Dec 03 18:06:40 crc kubenswrapper[5002]: I1203 18:06:40.515941 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d79b97dc6-pfr8r" event={"ID":"79deab8e-6745-4271-ab22-65f25550e578","Type":"ContainerDied","Data":"44a056753480c7af5318199e4211fa23fb4c508ca5ce2953622d9a7de8a9886d"} Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.535692 5002 generic.go:334] "Generic (PLEG): container finished" podID="79deab8e-6745-4271-ab22-65f25550e578" containerID="b3b4ea05ec9d83831db3f16ffc1cd6e9b9e582c5e435dadfdcf0a2cd4ad7b300" exitCode=0 Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.535849 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d79b97dc6-pfr8r" event={"ID":"79deab8e-6745-4271-ab22-65f25550e578","Type":"ContainerDied","Data":"b3b4ea05ec9d83831db3f16ffc1cd6e9b9e582c5e435dadfdcf0a2cd4ad7b300"} Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.713805 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.785701 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-combined-ca-bundle\") pod \"79deab8e-6745-4271-ab22-65f25550e578\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.786202 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-config\") pod \"79deab8e-6745-4271-ab22-65f25550e578\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.786226 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-httpd-config\") pod \"79deab8e-6745-4271-ab22-65f25550e578\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.786295 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-ovndb-tls-certs\") pod \"79deab8e-6745-4271-ab22-65f25550e578\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.786385 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7fl9\" (UniqueName: \"kubernetes.io/projected/79deab8e-6745-4271-ab22-65f25550e578-kube-api-access-g7fl9\") pod \"79deab8e-6745-4271-ab22-65f25550e578\" (UID: \"79deab8e-6745-4271-ab22-65f25550e578\") " Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.792024 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79deab8e-6745-4271-ab22-65f25550e578-kube-api-access-g7fl9" (OuterVolumeSpecName: "kube-api-access-g7fl9") pod "79deab8e-6745-4271-ab22-65f25550e578" (UID: "79deab8e-6745-4271-ab22-65f25550e578"). InnerVolumeSpecName "kube-api-access-g7fl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.794645 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "79deab8e-6745-4271-ab22-65f25550e578" (UID: "79deab8e-6745-4271-ab22-65f25550e578"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.841499 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79deab8e-6745-4271-ab22-65f25550e578" (UID: "79deab8e-6745-4271-ab22-65f25550e578"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.850920 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-config" (OuterVolumeSpecName: "config") pod "79deab8e-6745-4271-ab22-65f25550e578" (UID: "79deab8e-6745-4271-ab22-65f25550e578"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.871480 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "79deab8e-6745-4271-ab22-65f25550e578" (UID: "79deab8e-6745-4271-ab22-65f25550e578"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.888907 5002 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.888940 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7fl9\" (UniqueName: \"kubernetes.io/projected/79deab8e-6745-4271-ab22-65f25550e578-kube-api-access-g7fl9\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.888951 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.888963 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:41 crc kubenswrapper[5002]: I1203 18:06:41.888972 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/79deab8e-6745-4271-ab22-65f25550e578-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.552219 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d79b97dc6-pfr8r" event={"ID":"79deab8e-6745-4271-ab22-65f25550e578","Type":"ContainerDied","Data":"a715d953978efdd14fe03bc3a226a25dd9a3c7c789ca33b523cb373ef21a8d60"} Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.552288 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7d79b97dc6-pfr8r" Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.552331 5002 scope.go:117] "RemoveContainer" containerID="44a056753480c7af5318199e4211fa23fb4c508ca5ce2953622d9a7de8a9886d" Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.595856 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7d79b97dc6-pfr8r"] Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.597005 5002 scope.go:117] "RemoveContainer" containerID="b3b4ea05ec9d83831db3f16ffc1cd6e9b9e582c5e435dadfdcf0a2cd4ad7b300" Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.605421 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7d79b97dc6-pfr8r"] Dec 03 18:06:42 crc kubenswrapper[5002]: I1203 18:06:42.849934 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79deab8e-6745-4271-ab22-65f25550e578" path="/var/lib/kubelet/pods/79deab8e-6745-4271-ab22-65f25550e578/volumes" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833212 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nmppn"] Dec 03 18:06:43 crc kubenswrapper[5002]: E1203 18:06:43.833643 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9005c62c-87ce-434f-8781-d337d52a7056" containerName="init" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833657 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9005c62c-87ce-434f-8781-d337d52a7056" containerName="init" Dec 03 18:06:43 crc kubenswrapper[5002]: E1203 18:06:43.833674 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-httpd" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833682 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-httpd" Dec 03 18:06:43 crc kubenswrapper[5002]: E1203 18:06:43.833707 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-api" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833718 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-api" Dec 03 18:06:43 crc kubenswrapper[5002]: E1203 18:06:43.833738 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9005c62c-87ce-434f-8781-d337d52a7056" containerName="dnsmasq-dns" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833769 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9005c62c-87ce-434f-8781-d337d52a7056" containerName="dnsmasq-dns" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833961 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-httpd" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833979 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9005c62c-87ce-434f-8781-d337d52a7056" containerName="dnsmasq-dns" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.833991 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="79deab8e-6745-4271-ab22-65f25550e578" containerName="neutron-api" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.835496 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.852492 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nmppn"] Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.927669 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-catalog-content\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.927738 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blvh2\" (UniqueName: \"kubernetes.io/projected/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-kube-api-access-blvh2\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:43 crc kubenswrapper[5002]: I1203 18:06:43.927939 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-utilities\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.029920 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-catalog-content\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.029983 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blvh2\" (UniqueName: \"kubernetes.io/projected/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-kube-api-access-blvh2\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.030041 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-utilities\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.030344 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dsw5f"] Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.030510 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-catalog-content\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.030575 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-utilities\") pod \"certified-operators-nmppn\" (UID: 
\"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.032620 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.043418 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsw5f"] Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.061174 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blvh2\" (UniqueName: \"kubernetes.io/projected/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-kube-api-access-blvh2\") pod \"certified-operators-nmppn\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.131236 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-utilities\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.131615 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w592b\" (UniqueName: \"kubernetes.io/projected/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-kube-api-access-w592b\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.131669 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-catalog-content\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.161690 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.233315 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w592b\" (UniqueName: \"kubernetes.io/projected/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-kube-api-access-w592b\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.233418 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-catalog-content\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.233455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-utilities\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.234027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-catalog-content\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.234073 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-utilities\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.254591 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w592b\" (UniqueName: \"kubernetes.io/projected/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-kube-api-access-w592b\") pod \"community-operators-dsw5f\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.351384 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.705142 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nmppn"] Dec 03 18:06:44 crc kubenswrapper[5002]: I1203 18:06:44.899964 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsw5f"] Dec 03 18:06:45 crc kubenswrapper[5002]: I1203 18:06:45.586070 5002 generic.go:334] "Generic (PLEG): container finished" podID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerID="49eb5dbc26068f2681925e06fe19af42cad6b361f0fe98c50b7094b00c818005" exitCode=0 Dec 03 18:06:45 crc kubenswrapper[5002]: I1203 18:06:45.586661 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerDied","Data":"49eb5dbc26068f2681925e06fe19af42cad6b361f0fe98c50b7094b00c818005"} Dec 03 18:06:45 crc kubenswrapper[5002]: I1203 18:06:45.586735 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerStarted","Data":"a4fc8969244a6013bad0e8283f08b065f6851ea2b8942bc6716371cc5e131812"} Dec 03 18:06:45 crc kubenswrapper[5002]: I1203 18:06:45.588553 5002 generic.go:334] "Generic (PLEG): container finished" podID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerID="6bbe1f94eaf1627ab306f105bd5a6e9c2afdbdc2020baf5e47e6607d6c1c032e" exitCode=0 Dec 03 18:06:45 crc kubenswrapper[5002]: I1203 18:06:45.588585 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerDied","Data":"6bbe1f94eaf1627ab306f105bd5a6e9c2afdbdc2020baf5e47e6607d6c1c032e"} Dec 03 18:06:45 crc kubenswrapper[5002]: I1203 18:06:45.588605 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerStarted","Data":"eedbd169f8c937c91df513b8f90d54ac6458655ffffee2af5fe64ce6c34e6e8c"} Dec 03 18:06:46 crc kubenswrapper[5002]: I1203 18:06:46.602607 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerStarted","Data":"f663ecce820b6eac97dcb8a054dc442f6993c9ed5c74e2f0a57d99482037f166"} Dec 03 18:06:46 crc kubenswrapper[5002]: I1203 18:06:46.607406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerStarted","Data":"8836ffd8a814ea1c8bf4794e39cce0159665d51e5cd472f6b24142990cc2139a"} Dec 03 18:06:47 crc kubenswrapper[5002]: I1203 18:06:47.618004 5002 generic.go:334] "Generic (PLEG): container finished" podID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerID="8836ffd8a814ea1c8bf4794e39cce0159665d51e5cd472f6b24142990cc2139a" exitCode=0 Dec 03 18:06:47 crc kubenswrapper[5002]: I1203 18:06:47.618064 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerDied","Data":"8836ffd8a814ea1c8bf4794e39cce0159665d51e5cd472f6b24142990cc2139a"} Dec 03 18:06:47 crc kubenswrapper[5002]: I1203 18:06:47.621106 5002 generic.go:334] "Generic (PLEG): 
container finished" podID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerID="f663ecce820b6eac97dcb8a054dc442f6993c9ed5c74e2f0a57d99482037f166" exitCode=0 Dec 03 18:06:47 crc kubenswrapper[5002]: I1203 18:06:47.621151 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerDied","Data":"f663ecce820b6eac97dcb8a054dc442f6993c9ed5c74e2f0a57d99482037f166"} Dec 03 18:06:48 crc kubenswrapper[5002]: I1203 18:06:48.630431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerStarted","Data":"ecb175047a941ab35c7ce90bbf6b3ed3fdf3abc5797bdb0bf20b07396d8b5539"} Dec 03 18:06:48 crc kubenswrapper[5002]: I1203 18:06:48.633521 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerStarted","Data":"a08e7336fa36fa0d52827dded0466931a311fd080bb2528664d6c1e0333f8b47"} Dec 03 18:06:48 crc kubenswrapper[5002]: I1203 18:06:48.654216 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nmppn" podStartSLOduration=2.951124918 podStartE2EDuration="5.654202677s" podCreationTimestamp="2025-12-03 18:06:43 +0000 UTC" firstStartedPulling="2025-12-03 18:06:45.590267008 +0000 UTC m=+5729.004088936" lastFinishedPulling="2025-12-03 18:06:48.293344807 +0000 UTC m=+5731.707166695" observedRunningTime="2025-12-03 18:06:48.652267564 +0000 UTC m=+5732.066089452" watchObservedRunningTime="2025-12-03 18:06:48.654202677 +0000 UTC m=+5732.068024565" Dec 03 18:06:48 crc kubenswrapper[5002]: I1203 18:06:48.674695 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dsw5f" podStartSLOduration=2.230487965 podStartE2EDuration="4.674677567s" podCreationTimestamp="2025-12-03 18:06:44 +0000 UTC" firstStartedPulling="2025-12-03 18:06:45.602254811 +0000 UTC m=+5729.016076719" lastFinishedPulling="2025-12-03 18:06:48.046444433 +0000 UTC m=+5731.460266321" observedRunningTime="2025-12-03 18:06:48.66883901 +0000 UTC m=+5732.082660908" watchObservedRunningTime="2025-12-03 18:06:48.674677567 +0000 UTC m=+5732.088499455" Dec 03 18:06:50 crc kubenswrapper[5002]: I1203 18:06:50.916376 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:06:50 crc kubenswrapper[5002]: I1203 18:06:50.917050 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.162557 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.162878 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:54 crc 
kubenswrapper[5002]: I1203 18:06:54.224017 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.351607 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.351682 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.403082 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.733142 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:54 crc kubenswrapper[5002]: I1203 18:06:54.734042 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.222607 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-j5sgf"] Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.223906 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.226567 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.226808 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-z8z85" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.226926 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.228072 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.228782 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.233883 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-j5sgf"] Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.334201 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-swiftconf\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.334649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/516c703e-41d6-4219-9af9-183f93fed43a-etc-swift\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.334811 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6jrk\" (UniqueName: \"kubernetes.io/projected/516c703e-41d6-4219-9af9-183f93fed43a-kube-api-access-m6jrk\") pod 
\"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.334965 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-ring-data-devices\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.335163 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-dispersionconf\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.335322 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-scripts\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.335478 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-combined-ca-bundle\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.390866 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c5f95c47-l4xmq"] Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.392558 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.406380 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c5f95c47-l4xmq"] Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437505 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-swiftconf\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437653 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/516c703e-41d6-4219-9af9-183f93fed43a-etc-swift\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437690 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6jrk\" (UniqueName: \"kubernetes.io/projected/516c703e-41d6-4219-9af9-183f93fed43a-kube-api-access-m6jrk\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437765 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-ring-data-devices\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437804 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-dispersionconf\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437844 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-scripts\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.437885 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-combined-ca-bundle\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.438288 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/516c703e-41d6-4219-9af9-183f93fed43a-etc-swift\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.439011 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-ring-data-devices\") pod 
\"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.439847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-scripts\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.444889 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-swiftconf\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.445004 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-combined-ca-bundle\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.447151 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-dispersionconf\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.470952 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6jrk\" (UniqueName: \"kubernetes.io/projected/516c703e-41d6-4219-9af9-183f93fed43a-kube-api-access-m6jrk\") pod \"swift-ring-rebalance-j5sgf\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.539925 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-nb\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.540546 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-dns-svc\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.540788 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-sb\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.540931 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssk42\" (UniqueName: \"kubernetes.io/projected/13642341-a68b-43fb-99b6-e10187ca6b8c-kube-api-access-ssk42\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: 
\"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.540997 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-config\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.547269 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.642218 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssk42\" (UniqueName: \"kubernetes.io/projected/13642341-a68b-43fb-99b6-e10187ca6b8c-kube-api-access-ssk42\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.642287 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-config\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.643456 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-config\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.643540 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-nb\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.644212 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-nb\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.644404 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-dns-svc\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.645120 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-dns-svc\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.645206 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-sb\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.645927 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-sb\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.665433 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssk42\" (UniqueName: \"kubernetes.io/projected/13642341-a68b-43fb-99b6-e10187ca6b8c-kube-api-access-ssk42\") pod \"dnsmasq-dns-75c5f95c47-l4xmq\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") " pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:55 crc kubenswrapper[5002]: I1203 18:06:55.712073 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:56 crc kubenswrapper[5002]: I1203 18:06:56.059025 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-j5sgf"] Dec 03 18:06:56 crc kubenswrapper[5002]: I1203 18:06:56.083949 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dsw5f"] Dec 03 18:06:56 crc kubenswrapper[5002]: I1203 18:06:56.202597 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c5f95c47-l4xmq"] Dec 03 18:06:56 crc kubenswrapper[5002]: W1203 18:06:56.205849 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13642341_a68b_43fb_99b6_e10187ca6b8c.slice/crio-40f3f125ee4e350ba3485a382dbe47f95d026792c3ddae1253a78a0794a3f00a WatchSource:0}: Error finding container 40f3f125ee4e350ba3485a382dbe47f95d026792c3ddae1253a78a0794a3f00a: Status 404 returned error can't find the container with id 40f3f125ee4e350ba3485a382dbe47f95d026792c3ddae1253a78a0794a3f00a Dec 03 18:06:56 crc kubenswrapper[5002]: I1203 18:06:56.721196 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5sgf" event={"ID":"516c703e-41d6-4219-9af9-183f93fed43a","Type":"ContainerStarted","Data":"2f69d944fb0b2372ddcb6e0321f64091a6c46938cf02ef2ee10c9c363811d020"} Dec 03 18:06:56 crc kubenswrapper[5002]: I1203 18:06:56.722447 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" event={"ID":"13642341-a68b-43fb-99b6-e10187ca6b8c","Type":"ContainerStarted","Data":"40f3f125ee4e350ba3485a382dbe47f95d026792c3ddae1253a78a0794a3f00a"} Dec 03 18:06:56 crc kubenswrapper[5002]: I1203 18:06:56.722674 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dsw5f" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="registry-server" containerID="cri-o://a08e7336fa36fa0d52827dded0466931a311fd080bb2528664d6c1e0333f8b47" gracePeriod=2 Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.087192 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nmppn"] Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.087665 5002 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-nmppn" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="registry-server" containerID="cri-o://ecb175047a941ab35c7ce90bbf6b3ed3fdf3abc5797bdb0bf20b07396d8b5539" gracePeriod=2 Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.733094 5002 generic.go:334] "Generic (PLEG): container finished" podID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerID="ecb175047a941ab35c7ce90bbf6b3ed3fdf3abc5797bdb0bf20b07396d8b5539" exitCode=0 Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.733177 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerDied","Data":"ecb175047a941ab35c7ce90bbf6b3ed3fdf3abc5797bdb0bf20b07396d8b5539"} Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.733498 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmppn" event={"ID":"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce","Type":"ContainerDied","Data":"a4fc8969244a6013bad0e8283f08b065f6851ea2b8942bc6716371cc5e131812"} Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.733516 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4fc8969244a6013bad0e8283f08b065f6851ea2b8942bc6716371cc5e131812" Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.735786 5002 generic.go:334] "Generic (PLEG): container finished" podID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerID="a08e7336fa36fa0d52827dded0466931a311fd080bb2528664d6c1e0333f8b47" exitCode=0 Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.735858 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerDied","Data":"a08e7336fa36fa0d52827dded0466931a311fd080bb2528664d6c1e0333f8b47"} Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.738474 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5sgf" event={"ID":"516c703e-41d6-4219-9af9-183f93fed43a","Type":"ContainerStarted","Data":"8ce10b6f27e4f5ea1a3bc87a63cb1c8774c77ca17f0a70da116141c4dee1351f"} Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.740856 5002 generic.go:334] "Generic (PLEG): container finished" podID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerID="99897f4a5a41c5d566ee9152e326929be5108db31303f22e30cf1e89f3885c7d" exitCode=0 Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.740916 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" event={"ID":"13642341-a68b-43fb-99b6-e10187ca6b8c","Type":"ContainerDied","Data":"99897f4a5a41c5d566ee9152e326929be5108db31303f22e30cf1e89f3885c7d"} Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.760649 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-j5sgf" podStartSLOduration=2.760629413 podStartE2EDuration="2.760629413s" podCreationTimestamp="2025-12-03 18:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:06:57.757307344 +0000 UTC m=+5741.171129232" watchObservedRunningTime="2025-12-03 18:06:57.760629413 +0000 UTC m=+5741.174451301" Dec 03 18:06:57 crc kubenswrapper[5002]: I1203 18:06:57.934962 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.094784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-catalog-content\") pod \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.094851 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-utilities\") pod \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.094952 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blvh2\" (UniqueName: \"kubernetes.io/projected/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-kube-api-access-blvh2\") pod \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\" (UID: \"6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce\") " Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.095824 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-utilities" (OuterVolumeSpecName: "utilities") pod "6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" (UID: "6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.115201 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-kube-api-access-blvh2" (OuterVolumeSpecName: "kube-api-access-blvh2") pod "6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" (UID: "6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce"). InnerVolumeSpecName "kube-api-access-blvh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.152005 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.158476 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" (UID: "6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.197212 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.197263 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.197278 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blvh2\" (UniqueName: \"kubernetes.io/projected/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce-kube-api-access-blvh2\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.298607 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-catalog-content\") pod \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.298667 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w592b\" (UniqueName: \"kubernetes.io/projected/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-kube-api-access-w592b\") pod \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.298737 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-utilities\") pod \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\" (UID: \"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6\") " Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.299779 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-utilities" (OuterVolumeSpecName: "utilities") pod "cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" (UID: "cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.302799 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-kube-api-access-w592b" (OuterVolumeSpecName: "kube-api-access-w592b") pod "cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" (UID: "cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6"). InnerVolumeSpecName "kube-api-access-w592b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.358713 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" (UID: "cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.401010 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.401346 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w592b\" (UniqueName: \"kubernetes.io/projected/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-kube-api-access-w592b\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.401360 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.752872 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsw5f" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.752917 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsw5f" event={"ID":"cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6","Type":"ContainerDied","Data":"eedbd169f8c937c91df513b8f90d54ac6458655ffffee2af5fe64ce6c34e6e8c"} Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.752984 5002 scope.go:117] "RemoveContainer" containerID="a08e7336fa36fa0d52827dded0466931a311fd080bb2528664d6c1e0333f8b47" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.754631 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nmppn" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.754638 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" event={"ID":"13642341-a68b-43fb-99b6-e10187ca6b8c","Type":"ContainerStarted","Data":"c763cac1804b839b808320b26b1651b6b6b0fdd09ecb2a467972dc61a1516510"} Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.756466 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.789211 5002 scope.go:117] "RemoveContainer" containerID="f663ecce820b6eac97dcb8a054dc442f6993c9ed5c74e2f0a57d99482037f166" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.791337 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" podStartSLOduration=3.791295895 podStartE2EDuration="3.791295895s" podCreationTimestamp="2025-12-03 18:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:06:58.783869845 +0000 UTC m=+5742.197691743" watchObservedRunningTime="2025-12-03 18:06:58.791295895 +0000 UTC m=+5742.205117783" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.813039 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nmppn"] Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.821193 5002 scope.go:117] "RemoveContainer" containerID="6bbe1f94eaf1627ab306f105bd5a6e9c2afdbdc2020baf5e47e6607d6c1c032e" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.827788 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nmppn"] Dec 03 18:06:58 crc 
kubenswrapper[5002]: I1203 18:06:58.831893 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dsw5f"] Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.837993 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dsw5f"] Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.852005 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" path="/var/lib/kubelet/pods/6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce/volumes" Dec 03 18:06:58 crc kubenswrapper[5002]: I1203 18:06:58.852645 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" path="/var/lib/kubelet/pods/cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6/volumes" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.129318 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-797d98844b-nvhv6"] Dec 03 18:07:00 crc kubenswrapper[5002]: E1203 18:07:00.130653 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="extract-utilities" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.130684 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="extract-utilities" Dec 03 18:07:00 crc kubenswrapper[5002]: E1203 18:07:00.130697 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="extract-utilities" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.130707 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="extract-utilities" Dec 03 18:07:00 crc kubenswrapper[5002]: E1203 18:07:00.130728 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="registry-server" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.130736 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="registry-server" Dec 03 18:07:00 crc kubenswrapper[5002]: E1203 18:07:00.130779 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="extract-content" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.130788 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" containerName="extract-content" Dec 03 18:07:00 crc kubenswrapper[5002]: E1203 18:07:00.130808 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="extract-content" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.130816 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="extract-content" Dec 03 18:07:00 crc kubenswrapper[5002]: E1203 18:07:00.130859 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="registry-server" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.130867 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="registry-server" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.131321 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="cabbe91b-3bfd-4a03-b9c1-1cb73deef7f6" 
containerName="registry-server" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.131348 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6507bf9e-f6ab-4f29-a2d1-72b5d4da25ce" containerName="registry-server" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.139098 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.143276 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.144294 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.144570 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.148336 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-797d98844b-nvhv6"] Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.242857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9qzm\" (UniqueName: \"kubernetes.io/projected/8b47040d-5203-453c-af26-fc72fed2651c-kube-api-access-s9qzm\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.242945 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b47040d-5203-453c-af26-fc72fed2651c-run-httpd\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.242979 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8b47040d-5203-453c-af26-fc72fed2651c-etc-swift\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.243007 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b47040d-5203-453c-af26-fc72fed2651c-log-httpd\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.243045 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-combined-ca-bundle\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.243267 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-config-data\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: 
I1203 18:07:00.243360 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-public-tls-certs\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.243518 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-internal-tls-certs\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345295 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-internal-tls-certs\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345391 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9qzm\" (UniqueName: \"kubernetes.io/projected/8b47040d-5203-453c-af26-fc72fed2651c-kube-api-access-s9qzm\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345429 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b47040d-5203-453c-af26-fc72fed2651c-run-httpd\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345465 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8b47040d-5203-453c-af26-fc72fed2651c-etc-swift\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345520 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b47040d-5203-453c-af26-fc72fed2651c-log-httpd\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345573 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-combined-ca-bundle\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-config-data\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.345638 
5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-public-tls-certs\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.346198 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b47040d-5203-453c-af26-fc72fed2651c-run-httpd\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.346218 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b47040d-5203-453c-af26-fc72fed2651c-log-httpd\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.361178 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-public-tls-certs\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.361938 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-combined-ca-bundle\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.362092 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-internal-tls-certs\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.362690 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b47040d-5203-453c-af26-fc72fed2651c-config-data\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.364937 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8b47040d-5203-453c-af26-fc72fed2651c-etc-swift\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.372115 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9qzm\" (UniqueName: \"kubernetes.io/projected/8b47040d-5203-453c-af26-fc72fed2651c-kube-api-access-s9qzm\") pod \"swift-proxy-797d98844b-nvhv6\" (UID: \"8b47040d-5203-453c-af26-fc72fed2651c\") " pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:00 crc kubenswrapper[5002]: I1203 18:07:00.461369 5002 util.go:30] "No sandbox for pod can be found. 
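"No sandbox for pod can be found. Need to start a new one" (util.go:30) and its "No ready sandbox" sibling (util.go:48) mark the two ways a pod sync decides it needs a fresh pod sandbox: a newly admitted pod has no sandbox at all, while a pod whose sandbox has already exited (for example the marketplace pods being torn down above) has one that is no longer ready. For pods being started, a new sandbox is created and the containers join it; for pods mid-deletion the message reflects only the sync computation and no sandbox is actually recreated. A toy model of that decision, with invented names throughout:

package main

import "fmt"

type sandbox struct {
	id    string
	ready bool
}

// ensureSandbox is a toy version of the decision behind the util.go
// messages above: no sandbox at all means a brand-new pod, an unready
// one means the old sandbox died; either way a fresh sandbox is needed
// before containers can start.
func ensureSandbox(pods map[string]*sandbox, pod string) *sandbox {
	sb, ok := pods[pod]
	switch {
	case !ok:
		fmt.Printf("No sandbox for pod can be found. Need to start a new one pod=%q\n", pod)
	case !sb.ready:
		fmt.Printf("No ready sandbox for pod can be found. Need to start a new one pod=%q\n", pod)
	default:
		return sb // reuse the existing, ready sandbox
	}
	sb = &sandbox{id: "sandbox-for-" + pod, ready: true}
	pods[pod] = sb
	return sb
}

func main() {
	pods := map[string]*sandbox{}
	ensureSandbox(pods, "openstack/swift-proxy-797d98844b-nvhv6") // new pod
	pods["openstack/swift-proxy-797d98844b-nvhv6"].ready = false  // sandbox died
	ensureSandbox(pods, "openstack/swift-proxy-797d98844b-nvhv6")
}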
Need to start a new one" pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.241785 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-797d98844b-nvhv6"] Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.783027 5002 generic.go:334] "Generic (PLEG): container finished" podID="516c703e-41d6-4219-9af9-183f93fed43a" containerID="8ce10b6f27e4f5ea1a3bc87a63cb1c8774c77ca17f0a70da116141c4dee1351f" exitCode=0 Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.783132 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5sgf" event={"ID":"516c703e-41d6-4219-9af9-183f93fed43a","Type":"ContainerDied","Data":"8ce10b6f27e4f5ea1a3bc87a63cb1c8774c77ca17f0a70da116141c4dee1351f"} Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.786522 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-797d98844b-nvhv6" event={"ID":"8b47040d-5203-453c-af26-fc72fed2651c","Type":"ContainerStarted","Data":"6a696d57e8ec3633b219bdcc0ae2dcb9ca54f2d1e32713b75f4a26368e07cb8c"} Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.786559 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-797d98844b-nvhv6" event={"ID":"8b47040d-5203-453c-af26-fc72fed2651c","Type":"ContainerStarted","Data":"c32cec41abc8497761b258ac9fbece33d8fbceb1bfdaecad22fe508dd3d3da92"} Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.786574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-797d98844b-nvhv6" event={"ID":"8b47040d-5203-453c-af26-fc72fed2651c","Type":"ContainerStarted","Data":"1ec2c2f7b4b3d423cc02eb3bc75782ef11269e6f27342b8d4ccd77ab645dec88"} Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.787203 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.787315 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:01 crc kubenswrapper[5002]: I1203 18:07:01.839944 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-797d98844b-nvhv6" podStartSLOduration=1.839925241 podStartE2EDuration="1.839925241s" podCreationTimestamp="2025-12-03 18:07:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:07:01.8321042 +0000 UTC m=+5745.245926088" watchObservedRunningTime="2025-12-03 18:07:01.839925241 +0000 UTC m=+5745.253747129" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.156116 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.308535 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-swiftconf\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.308643 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-scripts\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.308798 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-combined-ca-bundle\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.309629 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6jrk\" (UniqueName: \"kubernetes.io/projected/516c703e-41d6-4219-9af9-183f93fed43a-kube-api-access-m6jrk\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.309945 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-ring-data-devices\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.310024 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-dispersionconf\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.310125 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/516c703e-41d6-4219-9af9-183f93fed43a-etc-swift\") pod \"516c703e-41d6-4219-9af9-183f93fed43a\" (UID: \"516c703e-41d6-4219-9af9-183f93fed43a\") " Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.311831 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.312231 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/516c703e-41d6-4219-9af9-183f93fed43a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.317026 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516c703e-41d6-4219-9af9-183f93fed43a-kube-api-access-m6jrk" (OuterVolumeSpecName: "kube-api-access-m6jrk") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "kube-api-access-m6jrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.319972 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.334436 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-scripts" (OuterVolumeSpecName: "scripts") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.336885 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.339127 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "516c703e-41d6-4219-9af9-183f93fed43a" (UID: "516c703e-41d6-4219-9af9-183f93fed43a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413579 5002 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413619 5002 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413629 5002 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/516c703e-41d6-4219-9af9-183f93fed43a-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413639 5002 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413650 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/516c703e-41d6-4219-9af9-183f93fed43a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413659 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516c703e-41d6-4219-9af9-183f93fed43a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.413670 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6jrk\" (UniqueName: \"kubernetes.io/projected/516c703e-41d6-4219-9af9-183f93fed43a-kube-api-access-m6jrk\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.804685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5sgf" event={"ID":"516c703e-41d6-4219-9af9-183f93fed43a","Type":"ContainerDied","Data":"2f69d944fb0b2372ddcb6e0321f64091a6c46938cf02ef2ee10c9c363811d020"} Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.804722 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f69d944fb0b2372ddcb6e0321f64091a6c46938cf02ef2ee10c9c363811d020" Dec 03 18:07:03 crc kubenswrapper[5002]: I1203 18:07:03.804805 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5sgf" Dec 03 18:07:05 crc kubenswrapper[5002]: I1203 18:07:05.713907 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" Dec 03 18:07:05 crc kubenswrapper[5002]: I1203 18:07:05.771671 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77598f7887-6vfxd"] Dec 03 18:07:05 crc kubenswrapper[5002]: I1203 18:07:05.771997 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerName="dnsmasq-dns" containerID="cri-o://ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009" gracePeriod=10 Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.297402 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.385889 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-dns-svc\") pod \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.385987 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hj5ln\" (UniqueName: \"kubernetes.io/projected/c5edca1a-5243-45a7-8700-19a54a2cd1b2-kube-api-access-hj5ln\") pod \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.386071 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-nb\") pod \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.386087 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config\") pod \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.386147 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-sb\") pod \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.391329 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5edca1a-5243-45a7-8700-19a54a2cd1b2-kube-api-access-hj5ln" (OuterVolumeSpecName: "kube-api-access-hj5ln") pod "c5edca1a-5243-45a7-8700-19a54a2cd1b2" (UID: "c5edca1a-5243-45a7-8700-19a54a2cd1b2"). InnerVolumeSpecName "kube-api-access-hj5ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.434031 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c5edca1a-5243-45a7-8700-19a54a2cd1b2" (UID: "c5edca1a-5243-45a7-8700-19a54a2cd1b2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.437823 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c5edca1a-5243-45a7-8700-19a54a2cd1b2" (UID: "c5edca1a-5243-45a7-8700-19a54a2cd1b2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:06 crc kubenswrapper[5002]: E1203 18:07:06.441063 5002 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config podName:c5edca1a-5243-45a7-8700-19a54a2cd1b2 nodeName:}" failed. No retries permitted until 2025-12-03 18:07:06.941039189 +0000 UTC m=+5750.354861077 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config") pod "c5edca1a-5243-45a7-8700-19a54a2cd1b2" (UID: "c5edca1a-5243-45a7-8700-19a54a2cd1b2") : error deleting /var/lib/kubelet/pods/c5edca1a-5243-45a7-8700-19a54a2cd1b2/volume-subpaths: remove /var/lib/kubelet/pods/c5edca1a-5243-45a7-8700-19a54a2cd1b2/volume-subpaths: no such file or directory Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.441461 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c5edca1a-5243-45a7-8700-19a54a2cd1b2" (UID: "c5edca1a-5243-45a7-8700-19a54a2cd1b2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.487762 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.487803 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hj5ln\" (UniqueName: \"kubernetes.io/projected/c5edca1a-5243-45a7-8700-19a54a2cd1b2-kube-api-access-hj5ln\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.487815 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.487826 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.829249 5002 generic.go:334] "Generic (PLEG): container finished" podID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerID="ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009" exitCode=0 Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.829283 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" event={"ID":"c5edca1a-5243-45a7-8700-19a54a2cd1b2","Type":"ContainerDied","Data":"ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009"} Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.829325 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" event={"ID":"c5edca1a-5243-45a7-8700-19a54a2cd1b2","Type":"ContainerDied","Data":"17cedb1eea1eaa398f10939c4d390201adf2d9e5d57634a147ededfa79ef5b75"} Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.829349 5002 scope.go:117] "RemoveContainer" containerID="ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.829373 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77598f7887-6vfxd" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.854457 5002 scope.go:117] "RemoveContainer" containerID="e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.880139 5002 scope.go:117] "RemoveContainer" containerID="ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009" Dec 03 18:07:06 crc kubenswrapper[5002]: E1203 18:07:06.880881 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009\": container with ID starting with ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009 not found: ID does not exist" containerID="ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.880924 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009"} err="failed to get container status \"ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009\": rpc error: code = NotFound desc = could not find container \"ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009\": container with ID starting with ba209192012d18185e50896154e69ceb4ffb4446422d3cea306aa3076abbb009 not found: ID does not exist" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.880953 5002 scope.go:117] "RemoveContainer" containerID="e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23" Dec 03 18:07:06 crc kubenswrapper[5002]: E1203 18:07:06.881408 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23\": container with ID starting with e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23 not found: ID does not exist" containerID="e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.881536 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23"} err="failed to get container status \"e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23\": rpc error: code = NotFound desc = could not find container \"e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23\": container with ID starting with e48762ad8041c38d72288d9257ed0cf66a53a28f9024dfff418e1e8131776d23 not found: ID does not exist" Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.995385 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config\") pod \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\" (UID: \"c5edca1a-5243-45a7-8700-19a54a2cd1b2\") " Dec 03 18:07:06 crc kubenswrapper[5002]: I1203 18:07:06.996154 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config" (OuterVolumeSpecName: "config") pod "c5edca1a-5243-45a7-8700-19a54a2cd1b2" (UID: "c5edca1a-5243-45a7-8700-19a54a2cd1b2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:07 crc kubenswrapper[5002]: I1203 18:07:07.098306 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5edca1a-5243-45a7-8700-19a54a2cd1b2-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:07 crc kubenswrapper[5002]: I1203 18:07:07.160869 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77598f7887-6vfxd"] Dec 03 18:07:07 crc kubenswrapper[5002]: I1203 18:07:07.168040 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77598f7887-6vfxd"] Dec 03 18:07:08 crc kubenswrapper[5002]: I1203 18:07:08.860208 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" path="/var/lib/kubelet/pods/c5edca1a-5243-45a7-8700-19a54a2cd1b2/volumes" Dec 03 18:07:10 crc kubenswrapper[5002]: I1203 18:07:10.466574 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:10 crc kubenswrapper[5002]: I1203 18:07:10.474664 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-797d98844b-nvhv6" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.514448 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-wrccs"] Dec 03 18:07:16 crc kubenswrapper[5002]: E1203 18:07:16.516521 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerName="dnsmasq-dns" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.516652 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerName="dnsmasq-dns" Dec 03 18:07:16 crc kubenswrapper[5002]: E1203 18:07:16.516873 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerName="init" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.516992 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerName="init" Dec 03 18:07:16 crc kubenswrapper[5002]: E1203 18:07:16.517091 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516c703e-41d6-4219-9af9-183f93fed43a" containerName="swift-ring-rebalance" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.517176 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="516c703e-41d6-4219-9af9-183f93fed43a" containerName="swift-ring-rebalance" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.517460 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="516c703e-41d6-4219-9af9-183f93fed43a" containerName="swift-ring-rebalance" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.517567 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5edca1a-5243-45a7-8700-19a54a2cd1b2" containerName="dnsmasq-dns" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.518471 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.523171 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-wrccs"] Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.616769 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-ee79-account-create-update-gpcb8"] Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.618185 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.619981 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.633774 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ee79-account-create-update-gpcb8"] Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.668600 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4mth\" (UniqueName: \"kubernetes.io/projected/36ce123f-22ef-459f-aa84-40ac04d8a5ac-kube-api-access-b4mth\") pod \"cinder-db-create-wrccs\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.669052 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36ce123f-22ef-459f-aa84-40ac04d8a5ac-operator-scripts\") pod \"cinder-db-create-wrccs\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.770199 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4mth\" (UniqueName: \"kubernetes.io/projected/36ce123f-22ef-459f-aa84-40ac04d8a5ac-kube-api-access-b4mth\") pod \"cinder-db-create-wrccs\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.770637 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-operator-scripts\") pod \"cinder-ee79-account-create-update-gpcb8\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.770814 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36ce123f-22ef-459f-aa84-40ac04d8a5ac-operator-scripts\") pod \"cinder-db-create-wrccs\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.770986 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h698\" (UniqueName: \"kubernetes.io/projected/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-kube-api-access-9h698\") pod \"cinder-ee79-account-create-update-gpcb8\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.771725 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/36ce123f-22ef-459f-aa84-40ac04d8a5ac-operator-scripts\") pod \"cinder-db-create-wrccs\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.795896 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4mth\" (UniqueName: \"kubernetes.io/projected/36ce123f-22ef-459f-aa84-40ac04d8a5ac-kube-api-access-b4mth\") pod \"cinder-db-create-wrccs\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.837737 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.872723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h698\" (UniqueName: \"kubernetes.io/projected/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-kube-api-access-9h698\") pod \"cinder-ee79-account-create-update-gpcb8\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.873150 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-operator-scripts\") pod \"cinder-ee79-account-create-update-gpcb8\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.873846 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-operator-scripts\") pod \"cinder-ee79-account-create-update-gpcb8\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.891285 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h698\" (UniqueName: \"kubernetes.io/projected/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-kube-api-access-9h698\") pod \"cinder-ee79-account-create-update-gpcb8\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:16 crc kubenswrapper[5002]: I1203 18:07:16.937493 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.299668 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-wrccs"] Dec 03 18:07:17 crc kubenswrapper[5002]: W1203 18:07:17.300477 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36ce123f_22ef_459f_aa84_40ac04d8a5ac.slice/crio-a142d46849c543a333bd13c2190775ecafb3c54e469282bab7a054184b82c68f WatchSource:0}: Error finding container a142d46849c543a333bd13c2190775ecafb3c54e469282bab7a054184b82c68f: Status 404 returned error can't find the container with id a142d46849c543a333bd13c2190775ecafb3c54e469282bab7a054184b82c68f Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.382102 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ee79-account-create-update-gpcb8"] Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.398116 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.945469 5002 generic.go:334] "Generic (PLEG): container finished" podID="36ce123f-22ef-459f-aa84-40ac04d8a5ac" containerID="b382000f85dc3f39894b6a9e980dadc0fd5e81c338831e357bb5f45bbdaf37cf" exitCode=0 Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.945520 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-wrccs" event={"ID":"36ce123f-22ef-459f-aa84-40ac04d8a5ac","Type":"ContainerDied","Data":"b382000f85dc3f39894b6a9e980dadc0fd5e81c338831e357bb5f45bbdaf37cf"} Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.945832 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-wrccs" event={"ID":"36ce123f-22ef-459f-aa84-40ac04d8a5ac","Type":"ContainerStarted","Data":"a142d46849c543a333bd13c2190775ecafb3c54e469282bab7a054184b82c68f"} Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.947881 5002 generic.go:334] "Generic (PLEG): container finished" podID="eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" containerID="ae845aadb52a499aca93f0eb1c28ccb744ad47f7a7bc6df9bffda4870f0e58f3" exitCode=0 Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.947908 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ee79-account-create-update-gpcb8" event={"ID":"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f","Type":"ContainerDied","Data":"ae845aadb52a499aca93f0eb1c28ccb744ad47f7a7bc6df9bffda4870f0e58f3"} Dec 03 18:07:17 crc kubenswrapper[5002]: I1203 18:07:17.947944 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ee79-account-create-update-gpcb8" event={"ID":"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f","Type":"ContainerStarted","Data":"feba72c8df3d925c7cf78d23dab61e09aa996c0161f67d3782ecf571b587618f"} Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.378358 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.386454 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.527262 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h698\" (UniqueName: \"kubernetes.io/projected/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-kube-api-access-9h698\") pod \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.527458 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-operator-scripts\") pod \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\" (UID: \"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f\") " Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.527587 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36ce123f-22ef-459f-aa84-40ac04d8a5ac-operator-scripts\") pod \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.527624 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4mth\" (UniqueName: \"kubernetes.io/projected/36ce123f-22ef-459f-aa84-40ac04d8a5ac-kube-api-access-b4mth\") pod \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\" (UID: \"36ce123f-22ef-459f-aa84-40ac04d8a5ac\") " Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.528911 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36ce123f-22ef-459f-aa84-40ac04d8a5ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "36ce123f-22ef-459f-aa84-40ac04d8a5ac" (UID: "36ce123f-22ef-459f-aa84-40ac04d8a5ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.529133 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" (UID: "eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.534981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ce123f-22ef-459f-aa84-40ac04d8a5ac-kube-api-access-b4mth" (OuterVolumeSpecName: "kube-api-access-b4mth") pod "36ce123f-22ef-459f-aa84-40ac04d8a5ac" (UID: "36ce123f-22ef-459f-aa84-40ac04d8a5ac"). InnerVolumeSpecName "kube-api-access-b4mth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.537137 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-kube-api-access-9h698" (OuterVolumeSpecName: "kube-api-access-9h698") pod "eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" (UID: "eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f"). InnerVolumeSpecName "kube-api-access-9h698". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.629799 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36ce123f-22ef-459f-aa84-40ac04d8a5ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.629862 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4mth\" (UniqueName: \"kubernetes.io/projected/36ce123f-22ef-459f-aa84-40ac04d8a5ac-kube-api-access-b4mth\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.629878 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h698\" (UniqueName: \"kubernetes.io/projected/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-kube-api-access-9h698\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.629891 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.967635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ee79-account-create-update-gpcb8" event={"ID":"eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f","Type":"ContainerDied","Data":"feba72c8df3d925c7cf78d23dab61e09aa996c0161f67d3782ecf571b587618f"} Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.967689 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="feba72c8df3d925c7cf78d23dab61e09aa996c0161f67d3782ecf571b587618f" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.967646 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ee79-account-create-update-gpcb8" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.981889 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-wrccs" event={"ID":"36ce123f-22ef-459f-aa84-40ac04d8a5ac","Type":"ContainerDied","Data":"a142d46849c543a333bd13c2190775ecafb3c54e469282bab7a054184b82c68f"} Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.981956 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a142d46849c543a333bd13c2190775ecafb3c54e469282bab7a054184b82c68f" Dec 03 18:07:19 crc kubenswrapper[5002]: I1203 18:07:19.982039 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-wrccs" Dec 03 18:07:20 crc kubenswrapper[5002]: I1203 18:07:20.917812 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:07:20 crc kubenswrapper[5002]: I1203 18:07:20.918166 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.869997 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-ph8lh"] Dec 03 18:07:21 crc kubenswrapper[5002]: E1203 18:07:21.870422 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" containerName="mariadb-account-create-update" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.870457 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" containerName="mariadb-account-create-update" Dec 03 18:07:21 crc kubenswrapper[5002]: E1203 18:07:21.870483 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ce123f-22ef-459f-aa84-40ac04d8a5ac" containerName="mariadb-database-create" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.870491 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ce123f-22ef-459f-aa84-40ac04d8a5ac" containerName="mariadb-database-create" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.870707 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" containerName="mariadb-account-create-update" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.870732 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ce123f-22ef-459f-aa84-40ac04d8a5ac" containerName="mariadb-database-create" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.871452 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.873970 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.875037 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-8clpl" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.875914 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.882507 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ph8lh"] Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.973311 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-db-sync-config-data\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.973785 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-config-data\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.973861 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwblq\" (UniqueName: \"kubernetes.io/projected/5c437a30-65ce-4e57-9091-7d670bd45e54-kube-api-access-dwblq\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.973941 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-scripts\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.974092 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5c437a30-65ce-4e57-9091-7d670bd45e54-etc-machine-id\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:21 crc kubenswrapper[5002]: I1203 18:07:21.974118 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-combined-ca-bundle\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076291 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5c437a30-65ce-4e57-9091-7d670bd45e54-etc-machine-id\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076357 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-combined-ca-bundle\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076409 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-db-sync-config-data\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076448 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5c437a30-65ce-4e57-9091-7d670bd45e54-etc-machine-id\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076490 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-config-data\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076660 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwblq\" (UniqueName: \"kubernetes.io/projected/5c437a30-65ce-4e57-9091-7d670bd45e54-kube-api-access-dwblq\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.076797 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-scripts\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.081880 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-scripts\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.081918 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-db-sync-config-data\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.082694 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-combined-ca-bundle\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.082898 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-config-data\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " 
pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.097548 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwblq\" (UniqueName: \"kubernetes.io/projected/5c437a30-65ce-4e57-9091-7d670bd45e54-kube-api-access-dwblq\") pod \"cinder-db-sync-ph8lh\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") " pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.191118 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ph8lh" Dec 03 18:07:22 crc kubenswrapper[5002]: I1203 18:07:22.499517 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ph8lh"] Dec 03 18:07:22 crc kubenswrapper[5002]: W1203 18:07:22.501262 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c437a30_65ce_4e57_9091_7d670bd45e54.slice/crio-5654f5a8066c6d29bf4bcc5205bf9340edf9ace13546472490c7c0e5180f3345 WatchSource:0}: Error finding container 5654f5a8066c6d29bf4bcc5205bf9340edf9ace13546472490c7c0e5180f3345: Status 404 returned error can't find the container with id 5654f5a8066c6d29bf4bcc5205bf9340edf9ace13546472490c7c0e5180f3345 Dec 03 18:07:23 crc kubenswrapper[5002]: I1203 18:07:23.009779 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ph8lh" event={"ID":"5c437a30-65ce-4e57-9091-7d670bd45e54","Type":"ContainerStarted","Data":"5654f5a8066c6d29bf4bcc5205bf9340edf9ace13546472490c7c0e5180f3345"} Dec 03 18:07:24 crc kubenswrapper[5002]: I1203 18:07:24.022104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ph8lh" event={"ID":"5c437a30-65ce-4e57-9091-7d670bd45e54","Type":"ContainerStarted","Data":"d52032ffb3eaf234e6896c2d91456e29f08121b634d471eee11529936ca1f206"} Dec 03 18:07:24 crc kubenswrapper[5002]: I1203 18:07:24.057448 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-ph8lh" podStartSLOduration=3.057429294 podStartE2EDuration="3.057429294s" podCreationTimestamp="2025-12-03 18:07:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:07:24.055958744 +0000 UTC m=+5767.469780632" watchObservedRunningTime="2025-12-03 18:07:24.057429294 +0000 UTC m=+5767.471251192" Dec 03 18:07:26 crc kubenswrapper[5002]: I1203 18:07:26.049544 5002 generic.go:334] "Generic (PLEG): container finished" podID="5c437a30-65ce-4e57-9091-7d670bd45e54" containerID="d52032ffb3eaf234e6896c2d91456e29f08121b634d471eee11529936ca1f206" exitCode=0 Dec 03 18:07:26 crc kubenswrapper[5002]: I1203 18:07:26.049741 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ph8lh" event={"ID":"5c437a30-65ce-4e57-9091-7d670bd45e54","Type":"ContainerDied","Data":"d52032ffb3eaf234e6896c2d91456e29f08121b634d471eee11529936ca1f206"} Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.440529 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-ph8lh"
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.490888 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwblq\" (UniqueName: \"kubernetes.io/projected/5c437a30-65ce-4e57-9091-7d670bd45e54-kube-api-access-dwblq\") pod \"5c437a30-65ce-4e57-9091-7d670bd45e54\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") "
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.490971 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-db-sync-config-data\") pod \"5c437a30-65ce-4e57-9091-7d670bd45e54\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") "
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.491035 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-combined-ca-bundle\") pod \"5c437a30-65ce-4e57-9091-7d670bd45e54\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") "
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.491165 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-config-data\") pod \"5c437a30-65ce-4e57-9091-7d670bd45e54\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") "
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.491362 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-scripts\") pod \"5c437a30-65ce-4e57-9091-7d670bd45e54\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") "
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.491419 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5c437a30-65ce-4e57-9091-7d670bd45e54-etc-machine-id\") pod \"5c437a30-65ce-4e57-9091-7d670bd45e54\" (UID: \"5c437a30-65ce-4e57-9091-7d670bd45e54\") "
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.491791 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5c437a30-65ce-4e57-9091-7d670bd45e54-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5c437a30-65ce-4e57-9091-7d670bd45e54" (UID: "5c437a30-65ce-4e57-9091-7d670bd45e54"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.492070 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5c437a30-65ce-4e57-9091-7d670bd45e54-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.498010 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-scripts" (OuterVolumeSpecName: "scripts") pod "5c437a30-65ce-4e57-9091-7d670bd45e54" (UID: "5c437a30-65ce-4e57-9091-7d670bd45e54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.498197 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5c437a30-65ce-4e57-9091-7d670bd45e54" (UID: "5c437a30-65ce-4e57-9091-7d670bd45e54"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.498488 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c437a30-65ce-4e57-9091-7d670bd45e54-kube-api-access-dwblq" (OuterVolumeSpecName: "kube-api-access-dwblq") pod "5c437a30-65ce-4e57-9091-7d670bd45e54" (UID: "5c437a30-65ce-4e57-9091-7d670bd45e54"). InnerVolumeSpecName "kube-api-access-dwblq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.547857 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c437a30-65ce-4e57-9091-7d670bd45e54" (UID: "5c437a30-65ce-4e57-9091-7d670bd45e54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.561587 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-config-data" (OuterVolumeSpecName: "config-data") pod "5c437a30-65ce-4e57-9091-7d670bd45e54" (UID: "5c437a30-65ce-4e57-9091-7d670bd45e54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.594518 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.594555 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.594567 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwblq\" (UniqueName: \"kubernetes.io/projected/5c437a30-65ce-4e57-9091-7d670bd45e54-kube-api-access-dwblq\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.594578 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:27 crc kubenswrapper[5002]: I1203 18:07:27.594587 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c437a30-65ce-4e57-9091-7d670bd45e54-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.067804 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ph8lh" event={"ID":"5c437a30-65ce-4e57-9091-7d670bd45e54","Type":"ContainerDied","Data":"5654f5a8066c6d29bf4bcc5205bf9340edf9ace13546472490c7c0e5180f3345"}
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.067848 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5654f5a8066c6d29bf4bcc5205bf9340edf9ace13546472490c7c0e5180f3345"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.067891 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ph8lh"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.396116 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5457b7bdcf-k79t9"]
Dec 03 18:07:28 crc kubenswrapper[5002]: E1203 18:07:28.397724 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c437a30-65ce-4e57-9091-7d670bd45e54" containerName="cinder-db-sync"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.397764 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c437a30-65ce-4e57-9091-7d670bd45e54" containerName="cinder-db-sync"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.398016 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c437a30-65ce-4e57-9091-7d670bd45e54" containerName="cinder-db-sync"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.398960 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.419040 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5457b7bdcf-k79t9"]
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.494610 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.497370 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.501389 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.501693 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.501917 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-8clpl"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.502105 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.507466 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.509297 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.509387 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-config\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.509484 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q2hg\" (UniqueName: \"kubernetes.io/projected/acda6a07-aaed-4e3d-8b41-4bee23141f2f-kube-api-access-4q2hg\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.509621 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.509779 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-dns-svc\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611660 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-scripts\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611794 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611832 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-config\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611866 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b5464c4-7767-43f0-b745-7e005b340c78-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611912 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhvd9\" (UniqueName: \"kubernetes.io/projected/2b5464c4-7767-43f0-b745-7e005b340c78-kube-api-access-xhvd9\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611940 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b5464c4-7767-43f0-b745-7e005b340c78-logs\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.611961 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data-custom\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.612000 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q2hg\" (UniqueName: \"kubernetes.io/projected/acda6a07-aaed-4e3d-8b41-4bee23141f2f-kube-api-access-4q2hg\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.612025 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.612051 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.612088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-dns-svc\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.613167 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-dns-svc\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.613877 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.614421 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-config\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.614902 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.641602 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q2hg\" (UniqueName: \"kubernetes.io/projected/acda6a07-aaed-4e3d-8b41-4bee23141f2f-kube-api-access-4q2hg\") pod \"dnsmasq-dns-5457b7bdcf-k79t9\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713326 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-scripts\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713399 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713442 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b5464c4-7767-43f0-b745-7e005b340c78-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713488 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhvd9\" (UniqueName: \"kubernetes.io/projected/2b5464c4-7767-43f0-b745-7e005b340c78-kube-api-access-xhvd9\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713517 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b5464c4-7767-43f0-b745-7e005b340c78-logs\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713536 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data-custom\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.713575 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.714328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b5464c4-7767-43f0-b745-7e005b340c78-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.714928 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b5464c4-7767-43f0-b745-7e005b340c78-logs\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.719221 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data-custom\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.719378 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.719997 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.720431 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-scripts\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.733454 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.737735 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhvd9\" (UniqueName: \"kubernetes.io/projected/2b5464c4-7767-43f0-b745-7e005b340c78-kube-api-access-xhvd9\") pod \"cinder-api-0\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") " pod="openstack/cinder-api-0"
Dec 03 18:07:28 crc kubenswrapper[5002]: I1203 18:07:28.823571 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 03 18:07:29 crc kubenswrapper[5002]: I1203 18:07:29.218477 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5457b7bdcf-k79t9"]
Dec 03 18:07:29 crc kubenswrapper[5002]: I1203 18:07:29.378483 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:29 crc kubenswrapper[5002]: W1203 18:07:29.380401 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b5464c4_7767_43f0_b745_7e005b340c78.slice/crio-f39d882126450b088eeb9e7c424aef9aeec669e6790c3c9eb2316213244cd324 WatchSource:0}: Error finding container f39d882126450b088eeb9e7c424aef9aeec669e6790c3c9eb2316213244cd324: Status 404 returned error can't find the container with id f39d882126450b088eeb9e7c424aef9aeec669e6790c3c9eb2316213244cd324
Dec 03 18:07:30 crc kubenswrapper[5002]: I1203 18:07:30.108183 5002 generic.go:334] "Generic (PLEG): container finished" podID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerID="f91e5a2ed97d3e0fc55b33ac8413a866b2a5a2128c01d82e0884e70213b088d5" exitCode=0
Dec 03 18:07:30 crc kubenswrapper[5002]: I1203 18:07:30.108260 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" event={"ID":"acda6a07-aaed-4e3d-8b41-4bee23141f2f","Type":"ContainerDied","Data":"f91e5a2ed97d3e0fc55b33ac8413a866b2a5a2128c01d82e0884e70213b088d5"}
Dec 03 18:07:30 crc kubenswrapper[5002]: I1203 18:07:30.108786 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" event={"ID":"acda6a07-aaed-4e3d-8b41-4bee23141f2f","Type":"ContainerStarted","Data":"9949060be3d99579636b45faedfb46191fd560851c226cc646bd5fc8809b7325"}
Dec 03 18:07:30 crc kubenswrapper[5002]: I1203 18:07:30.115721 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b5464c4-7767-43f0-b745-7e005b340c78","Type":"ContainerStarted","Data":"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"}
Dec 03 18:07:30 crc kubenswrapper[5002]: I1203 18:07:30.115856 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b5464c4-7767-43f0-b745-7e005b340c78","Type":"ContainerStarted","Data":"f39d882126450b088eeb9e7c424aef9aeec669e6790c3c9eb2316213244cd324"}
Dec 03 18:07:30 crc kubenswrapper[5002]: I1203 18:07:30.390134 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.125792 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" event={"ID":"acda6a07-aaed-4e3d-8b41-4bee23141f2f","Type":"ContainerStarted","Data":"6b379d16cb549caea061692c36fe3607343b439ecd1ba33fbbeecbeb92017427"}
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.126175 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.129245 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b5464c4-7767-43f0-b745-7e005b340c78","Type":"ContainerStarted","Data":"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"}
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.129615 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.129435 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api" containerID="cri-o://639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035" gracePeriod=30
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.129411 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api-log" containerID="cri-o://b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11" gracePeriod=30
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.150887 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" podStartSLOduration=3.150871459 podStartE2EDuration="3.150871459s" podCreationTimestamp="2025-12-03 18:07:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:07:31.145542246 +0000 UTC m=+5774.559364134" watchObservedRunningTime="2025-12-03 18:07:31.150871459 +0000 UTC m=+5774.564693347"
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.169145 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.169125211 podStartE2EDuration="3.169125211s" podCreationTimestamp="2025-12-03 18:07:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:07:31.160856968 +0000 UTC m=+5774.574678866" watchObservedRunningTime="2025-12-03 18:07:31.169125211 +0000 UTC m=+5774.582947099"
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.790574 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.895625 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-scripts\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.895739 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b5464c4-7767-43f0-b745-7e005b340c78-etc-machine-id\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.895792 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-combined-ca-bundle\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.895800 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b5464c4-7767-43f0-b745-7e005b340c78-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.895820 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.896013 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data-custom\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.896083 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhvd9\" (UniqueName: \"kubernetes.io/projected/2b5464c4-7767-43f0-b745-7e005b340c78-kube-api-access-xhvd9\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.896132 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b5464c4-7767-43f0-b745-7e005b340c78-logs\") pod \"2b5464c4-7767-43f0-b745-7e005b340c78\" (UID: \"2b5464c4-7767-43f0-b745-7e005b340c78\") "
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.896716 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b5464c4-7767-43f0-b745-7e005b340c78-logs" (OuterVolumeSpecName: "logs") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.897173 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b5464c4-7767-43f0-b745-7e005b340c78-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.897199 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b5464c4-7767-43f0-b745-7e005b340c78-logs\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.903508 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-scripts" (OuterVolumeSpecName: "scripts") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.904202 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b5464c4-7767-43f0-b745-7e005b340c78-kube-api-access-xhvd9" (OuterVolumeSpecName: "kube-api-access-xhvd9") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "kube-api-access-xhvd9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.911474 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.931299 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.946164 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data" (OuterVolumeSpecName: "config-data") pod "2b5464c4-7767-43f0-b745-7e005b340c78" (UID: "2b5464c4-7767-43f0-b745-7e005b340c78"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.999332 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhvd9\" (UniqueName: \"kubernetes.io/projected/2b5464c4-7767-43f0-b745-7e005b340c78-kube-api-access-xhvd9\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.999370 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.999381 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.999390 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:31 crc kubenswrapper[5002]: I1203 18:07:31.999403 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b5464c4-7767-43f0-b745-7e005b340c78-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139546 5002 generic.go:334] "Generic (PLEG): container finished" podID="2b5464c4-7767-43f0-b745-7e005b340c78" containerID="639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035" exitCode=0
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139577 5002 generic.go:334] "Generic (PLEG): container finished" podID="2b5464c4-7767-43f0-b745-7e005b340c78" containerID="b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11" exitCode=143
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139628 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139667 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b5464c4-7767-43f0-b745-7e005b340c78","Type":"ContainerDied","Data":"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"}
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139694 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b5464c4-7767-43f0-b745-7e005b340c78","Type":"ContainerDied","Data":"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"}
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139705 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b5464c4-7767-43f0-b745-7e005b340c78","Type":"ContainerDied","Data":"f39d882126450b088eeb9e7c424aef9aeec669e6790c3c9eb2316213244cd324"}
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.139720 5002 scope.go:117] "RemoveContainer" containerID="639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.178557 5002 scope.go:117] "RemoveContainer" containerID="b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.185988 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.198103 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.202561 5002 scope.go:117] "RemoveContainer" containerID="639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"
Dec 03 18:07:32 crc kubenswrapper[5002]: E1203 18:07:32.202936 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035\": container with ID starting with 639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035 not found: ID does not exist" containerID="639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.202967 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"} err="failed to get container status \"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035\": rpc error: code = NotFound desc = could not find container \"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035\": container with ID starting with 639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035 not found: ID does not exist"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.202987 5002 scope.go:117] "RemoveContainer" containerID="b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"
Dec 03 18:07:32 crc kubenswrapper[5002]: E1203 18:07:32.203220 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11\": container with ID starting with b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11 not found: ID does not exist" containerID="b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.203252 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"} err="failed to get container status \"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11\": rpc error: code = NotFound desc = could not find container \"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11\": container with ID starting with b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11 not found: ID does not exist"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.203264 5002 scope.go:117] "RemoveContainer" containerID="639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.203457 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035"} err="failed to get container status \"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035\": rpc error: code = NotFound desc = could not find container \"639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035\": container with ID starting with 639f9d8ed56ff1b396896f5cde9db27f6a2b0f1cbc84aff00c4c9272f5521035 not found: ID does not exist"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.203481 5002 scope.go:117] "RemoveContainer" containerID="b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.203672 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11"} err="failed to get container status \"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11\": rpc error: code = NotFound desc = could not find container \"b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11\": container with ID starting with b54a5c963edbc0adae8359e67e8deb4e1aae8c6536536eadc923c81427551e11 not found: ID does not exist"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.209304 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:32 crc kubenswrapper[5002]: E1203 18:07:32.209691 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.209709 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api"
Dec 03 18:07:32 crc kubenswrapper[5002]: E1203 18:07:32.209720 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api-log"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.209726 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api-log"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.209884 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api-log"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.209916 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" containerName="cinder-api"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.210808 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.213660 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.214413 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.214445 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-8clpl"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.214682 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.214953 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.222808 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.223025 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.407725 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-public-tls-certs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408399 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a43794-8262-4c5d-b54b-16978d239271-logs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-scripts\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408612 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54a43794-8262-4c5d-b54b-16978d239271-etc-machine-id\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408651 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m5sq\" (UniqueName: \"kubernetes.io/projected/54a43794-8262-4c5d-b54b-16978d239271-kube-api-access-2m5sq\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data-custom\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.408819 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511001 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511080 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m5sq\" (UniqueName: \"kubernetes.io/projected/54a43794-8262-4c5d-b54b-16978d239271-kube-api-access-2m5sq\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511101 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54a43794-8262-4c5d-b54b-16978d239271-etc-machine-id\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511150 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data-custom\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511194 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511247 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-public-tls-certs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511268 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511268 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54a43794-8262-4c5d-b54b-16978d239271-etc-machine-id\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511292 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-scripts\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.511351 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a43794-8262-4c5d-b54b-16978d239271-logs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.512012 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a43794-8262-4c5d-b54b-16978d239271-logs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.515819 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-scripts\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.515836 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data-custom\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.517096 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.517390 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.517737 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.521278 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-public-tls-certs\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.534290 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m5sq\" (UniqueName: \"kubernetes.io/projected/54a43794-8262-4c5d-b54b-16978d239271-kube-api-access-2m5sq\") pod \"cinder-api-0\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.545834 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 03 18:07:32 crc kubenswrapper[5002]: I1203 18:07:32.855880 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b5464c4-7767-43f0-b745-7e005b340c78" path="/var/lib/kubelet/pods/2b5464c4-7767-43f0-b745-7e005b340c78/volumes"
Dec 03 18:07:33 crc kubenswrapper[5002]: I1203 18:07:33.084194 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 18:07:33 crc kubenswrapper[5002]: I1203 18:07:33.153606 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"54a43794-8262-4c5d-b54b-16978d239271","Type":"ContainerStarted","Data":"13aff22ce5e293cbbe46905413a82b11de298d6cfdd4f57a7c76c1a65a117772"}
Dec 03 18:07:34 crc kubenswrapper[5002]: I1203 18:07:34.168706 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"54a43794-8262-4c5d-b54b-16978d239271","Type":"ContainerStarted","Data":"6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290"}
Dec 03 18:07:35 crc kubenswrapper[5002]: I1203 18:07:35.182599 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"54a43794-8262-4c5d-b54b-16978d239271","Type":"ContainerStarted","Data":"a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28"}
Dec 03 18:07:35 crc kubenswrapper[5002]: I1203 18:07:35.183052 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 03 18:07:38 crc kubenswrapper[5002]: I1203 18:07:38.735060 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9"
Dec 03 18:07:38 crc kubenswrapper[5002]: I1203 18:07:38.761086 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.761063099 podStartE2EDuration="6.761063099s" podCreationTimestamp="2025-12-03 18:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:07:35.206965273 +0000 UTC m=+5778.620787181" watchObservedRunningTime="2025-12-03 18:07:38.761063099 +0000 UTC m=+5782.174884987"
Dec 03 18:07:38 crc kubenswrapper[5002]: I1203 18:07:38.801128 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c5f95c47-l4xmq"]
Dec 03 18:07:38 crc kubenswrapper[5002]: I1203 18:07:38.801382 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerName="dnsmasq-dns" containerID="cri-o://c763cac1804b839b808320b26b1651b6b6b0fdd09ecb2a467972dc61a1516510" gracePeriod=10
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.221161 5002 generic.go:334] "Generic (PLEG): container finished" podID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerID="c763cac1804b839b808320b26b1651b6b6b0fdd09ecb2a467972dc61a1516510" exitCode=0
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.221232 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" event={"ID":"13642341-a68b-43fb-99b6-e10187ca6b8c","Type":"ContainerDied","Data":"c763cac1804b839b808320b26b1651b6b6b0fdd09ecb2a467972dc61a1516510"}
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.739882 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq"
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.844971 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-sb\") pod \"13642341-a68b-43fb-99b6-e10187ca6b8c\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") "
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.845553 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-nb\") pod \"13642341-a68b-43fb-99b6-e10187ca6b8c\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") "
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.845783 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-dns-svc\") pod \"13642341-a68b-43fb-99b6-e10187ca6b8c\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") "
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.846017 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-config\") pod \"13642341-a68b-43fb-99b6-e10187ca6b8c\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") "
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.846208 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssk42\" (UniqueName: \"kubernetes.io/projected/13642341-a68b-43fb-99b6-e10187ca6b8c-kube-api-access-ssk42\") pod \"13642341-a68b-43fb-99b6-e10187ca6b8c\" (UID: \"13642341-a68b-43fb-99b6-e10187ca6b8c\") "
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.854695 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13642341-a68b-43fb-99b6-e10187ca6b8c-kube-api-access-ssk42" (OuterVolumeSpecName: "kube-api-access-ssk42") pod "13642341-a68b-43fb-99b6-e10187ca6b8c" (UID: "13642341-a68b-43fb-99b6-e10187ca6b8c"). InnerVolumeSpecName "kube-api-access-ssk42". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.902177 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "13642341-a68b-43fb-99b6-e10187ca6b8c" (UID: "13642341-a68b-43fb-99b6-e10187ca6b8c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.902637 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "13642341-a68b-43fb-99b6-e10187ca6b8c" (UID: "13642341-a68b-43fb-99b6-e10187ca6b8c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.905667 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "13642341-a68b-43fb-99b6-e10187ca6b8c" (UID: "13642341-a68b-43fb-99b6-e10187ca6b8c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.911281 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-config" (OuterVolumeSpecName: "config") pod "13642341-a68b-43fb-99b6-e10187ca6b8c" (UID: "13642341-a68b-43fb-99b6-e10187ca6b8c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.951914 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.951971 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.951991 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.952010 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13642341-a68b-43fb-99b6-e10187ca6b8c-config\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:39 crc kubenswrapper[5002]: I1203 18:07:39.952027 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssk42\" (UniqueName: \"kubernetes.io/projected/13642341-a68b-43fb-99b6-e10187ca6b8c-kube-api-access-ssk42\") on node \"crc\" DevicePath \"\""
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.230798 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq" event={"ID":"13642341-a68b-43fb-99b6-e10187ca6b8c","Type":"ContainerDied","Data":"40f3f125ee4e350ba3485a382dbe47f95d026792c3ddae1253a78a0794a3f00a"}
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.230857 5002 scope.go:117] "RemoveContainer" containerID="c763cac1804b839b808320b26b1651b6b6b0fdd09ecb2a467972dc61a1516510"
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.230894 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c5f95c47-l4xmq"
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.261288 5002 scope.go:117] "RemoveContainer" containerID="99897f4a5a41c5d566ee9152e326929be5108db31303f22e30cf1e89f3885c7d"
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.268663 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c5f95c47-l4xmq"]
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.276736 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c5f95c47-l4xmq"]
Dec 03 18:07:40 crc kubenswrapper[5002]: I1203 18:07:40.853383 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" path="/var/lib/kubelet/pods/13642341-a68b-43fb-99b6-e10187ca6b8c/volumes"
Dec 03 18:07:42 crc kubenswrapper[5002]: I1203 18:07:42.159292 5002 scope.go:117] "RemoveContainer" containerID="674ea34615849135115aabcb26e347be3e3e17a5b5a2aeb71f87d71cac8846ff"
Dec 03 18:07:42 crc kubenswrapper[5002]: I1203 18:07:42.194739 5002 scope.go:117] "RemoveContainer" containerID="01f0925c64c38da0fbc7a703cf722fe7ab0d34672351d740b0a58f3875c9e8ed"
Dec 03 18:07:44 crc kubenswrapper[5002]: I1203 18:07:44.426230 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Dec 03 18:07:50 crc kubenswrapper[5002]: I1203 18:07:50.916877 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 18:07:50 crc kubenswrapper[5002]: I1203 18:07:50.917862 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 18:07:50 crc kubenswrapper[5002]: I1203 18:07:50.917944 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f"
Dec 03 18:07:50 crc kubenswrapper[5002]: I1203 18:07:50.919673 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f5d0d4d14dd7dac7b94bb6ad5c76c5a78375d0dbd75748edfcbc430636410612"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 18:07:50 crc kubenswrapper[5002]: I1203 18:07:50.919914 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://f5d0d4d14dd7dac7b94bb6ad5c76c5a78375d0dbd75748edfcbc430636410612" gracePeriod=600
Dec 03 18:07:51 crc kubenswrapper[5002]: I1203 18:07:51.354857 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="f5d0d4d14dd7dac7b94bb6ad5c76c5a78375d0dbd75748edfcbc430636410612" exitCode=0
Dec 03 18:07:51 crc kubenswrapper[5002]: I1203 18:07:51.354933 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"f5d0d4d14dd7dac7b94bb6ad5c76c5a78375d0dbd75748edfcbc430636410612"}
Dec 03 18:07:51 crc kubenswrapper[5002]: I1203 18:07:51.355250 5002 scope.go:117] "RemoveContainer" containerID="d6c23194951a8e9cfc09599a8775bd095404563a20187d11b185f11e32c2677f"
Dec 03 18:07:52 crc kubenswrapper[5002]: I1203 18:07:52.384739 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"}
Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.925872 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 03 18:08:00 crc kubenswrapper[5002]: E1203 18:08:00.926683 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerName="dnsmasq-dns"
Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.926703 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerName="dnsmasq-dns"
Dec 03 18:08:00 crc kubenswrapper[5002]: E1203 18:08:00.926764 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerName="init"
Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.926774 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerName="init"
Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.926981 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="13642341-a68b-43fb-99b6-e10187ca6b8c" containerName="dnsmasq-dns"
Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.928144 5002 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.938296 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 18:08:00 crc kubenswrapper[5002]: I1203 18:08:00.942219 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.073325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.073423 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlq5w\" (UniqueName: \"kubernetes.io/projected/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-kube-api-access-qlq5w\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.073486 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.073566 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.073598 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-scripts\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.073647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.174669 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.174732 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.174826 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-qlq5w\" (UniqueName: \"kubernetes.io/projected/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-kube-api-access-qlq5w\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.174855 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.174908 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.174929 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-scripts\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.175329 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.181260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.181495 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.182332 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.182912 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-scripts\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.198281 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlq5w\" (UniqueName: \"kubernetes.io/projected/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-kube-api-access-qlq5w\") pod \"cinder-scheduler-0\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " pod="openstack/cinder-scheduler-0" Dec 03 
18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.256573 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 18:08:01 crc kubenswrapper[5002]: I1203 18:08:01.729966 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.306092 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.306675 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api-log" containerID="cri-o://6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290" gracePeriod=30 Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.306802 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api" containerID="cri-o://a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28" gracePeriod=30 Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.477437 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"707634ac-d0d0-4cfc-81d8-427fd4eaaf91","Type":"ContainerStarted","Data":"ad3dba66b80660571a2ba6156fd684aacf4627d853e2c31fc998dcd552c8f650"} Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.477857 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"707634ac-d0d0-4cfc-81d8-427fd4eaaf91","Type":"ContainerStarted","Data":"af34f20aafa8c2bb68036f156aac2496cdd8678b5b84be972b6b5b7ba89ace29"} Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.484513 5002 generic.go:334] "Generic (PLEG): container finished" podID="54a43794-8262-4c5d-b54b-16978d239271" containerID="6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290" exitCode=143 Dec 03 18:08:02 crc kubenswrapper[5002]: I1203 18:08:02.484581 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"54a43794-8262-4c5d-b54b-16978d239271","Type":"ContainerDied","Data":"6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290"} Dec 03 18:08:03 crc kubenswrapper[5002]: I1203 18:08:03.509125 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"707634ac-d0d0-4cfc-81d8-427fd4eaaf91","Type":"ContainerStarted","Data":"d5015a0b346d0ad972284510b59bc16221c7557964b3b30fb56210d1bbbbcb79"} Dec 03 18:08:03 crc kubenswrapper[5002]: I1203 18:08:03.538445 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.538419909 podStartE2EDuration="3.538419909s" podCreationTimestamp="2025-12-03 18:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:03.533821805 +0000 UTC m=+5806.947643693" watchObservedRunningTime="2025-12-03 18:08:03.538419909 +0000 UTC m=+5806.952241797" Dec 03 18:08:05 crc kubenswrapper[5002]: I1203 18:08:05.463674 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.48:8776/healthcheck\": read tcp 10.217.0.2:60016->10.217.1.48:8776: read: connection reset by 
peer" Dec 03 18:08:05 crc kubenswrapper[5002]: I1203 18:08:05.961457 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.088999 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089037 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54a43794-8262-4c5d-b54b-16978d239271-etc-machine-id\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089057 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a43794-8262-4c5d-b54b-16978d239271-logs\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089074 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data-custom\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089125 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m5sq\" (UniqueName: \"kubernetes.io/projected/54a43794-8262-4c5d-b54b-16978d239271-kube-api-access-2m5sq\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089163 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-combined-ca-bundle\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089218 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-scripts\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089253 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-internal-tls-certs\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.089348 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-public-tls-certs\") pod \"54a43794-8262-4c5d-b54b-16978d239271\" (UID: \"54a43794-8262-4c5d-b54b-16978d239271\") " Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.092046 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/54a43794-8262-4c5d-b54b-16978d239271-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.092308 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54a43794-8262-4c5d-b54b-16978d239271-logs" (OuterVolumeSpecName: "logs") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.095894 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-scripts" (OuterVolumeSpecName: "scripts") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.097413 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.101045 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54a43794-8262-4c5d-b54b-16978d239271-kube-api-access-2m5sq" (OuterVolumeSpecName: "kube-api-access-2m5sq") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "kube-api-access-2m5sq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.136017 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.154979 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data" (OuterVolumeSpecName: "config-data") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.161558 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.162121 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "54a43794-8262-4c5d-b54b-16978d239271" (UID: "54a43794-8262-4c5d-b54b-16978d239271"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191150 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m5sq\" (UniqueName: \"kubernetes.io/projected/54a43794-8262-4c5d-b54b-16978d239271-kube-api-access-2m5sq\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191189 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191200 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191208 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191217 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191225 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191233 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54a43794-8262-4c5d-b54b-16978d239271-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191241 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a43794-8262-4c5d-b54b-16978d239271-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.191250 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54a43794-8262-4c5d-b54b-16978d239271-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.257055 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.537098 5002 generic.go:334] "Generic (PLEG): container finished" podID="54a43794-8262-4c5d-b54b-16978d239271" containerID="a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28" exitCode=0 Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.537141 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"54a43794-8262-4c5d-b54b-16978d239271","Type":"ContainerDied","Data":"a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28"} Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.537158 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.537173 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"54a43794-8262-4c5d-b54b-16978d239271","Type":"ContainerDied","Data":"13aff22ce5e293cbbe46905413a82b11de298d6cfdd4f57a7c76c1a65a117772"} Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.537203 5002 scope.go:117] "RemoveContainer" containerID="a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.627251 5002 scope.go:117] "RemoveContainer" containerID="6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.627669 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.637207 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.650358 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 18:08:06 crc kubenswrapper[5002]: E1203 18:08:06.651202 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.651232 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api" Dec 03 18:08:06 crc kubenswrapper[5002]: E1203 18:08:06.651254 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api-log" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.651266 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api-log" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.651482 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api-log" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.651524 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="54a43794-8262-4c5d-b54b-16978d239271" containerName="cinder-api" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.653202 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.656466 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.656607 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.656612 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.662407 5002 scope.go:117] "RemoveContainer" containerID="a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.662534 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 18:08:06 crc kubenswrapper[5002]: E1203 18:08:06.663057 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28\": container with ID starting with a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28 not found: ID does not exist" containerID="a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.663088 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28"} err="failed to get container status \"a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28\": rpc error: code = NotFound desc = could not find container \"a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28\": container with ID starting with a88be162a1c5c9b438409cb3ea627ac0e47009d80dcac1d13079befb5ba0bb28 not found: ID does not exist" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.663112 5002 scope.go:117] "RemoveContainer" containerID="6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290" Dec 03 18:08:06 crc kubenswrapper[5002]: E1203 18:08:06.663414 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290\": container with ID starting with 6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290 not found: ID does not exist" containerID="6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.663534 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290"} err="failed to get container status \"6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290\": rpc error: code = NotFound desc = could not find container \"6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290\": container with ID starting with 6091f27db5f51d76507d53d29b0e240eb0fb39633f1540a117c848affbd1a290 not found: ID does not exist" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.800595 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-config-data\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 
18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.800804 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.800868 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c77eef01-0f3d-4977-b89d-115010fe9491-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.801113 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rc5h\" (UniqueName: \"kubernetes.io/projected/c77eef01-0f3d-4977-b89d-115010fe9491-kube-api-access-6rc5h\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.801178 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-config-data-custom\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.801263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.801457 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c77eef01-0f3d-4977-b89d-115010fe9491-logs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.801691 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.801962 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-scripts\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.852531 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54a43794-8262-4c5d-b54b-16978d239271" path="/var/lib/kubelet/pods/54a43794-8262-4c5d-b54b-16978d239271/volumes" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903594 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-scripts\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " 
pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903649 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-config-data\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c77eef01-0f3d-4977-b89d-115010fe9491-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903817 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903894 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rc5h\" (UniqueName: \"kubernetes.io/projected/c77eef01-0f3d-4977-b89d-115010fe9491-kube-api-access-6rc5h\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903923 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-config-data-custom\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.903988 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c77eef01-0f3d-4977-b89d-115010fe9491-logs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.904038 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.904254 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c77eef01-0f3d-4977-b89d-115010fe9491-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.905027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c77eef01-0f3d-4977-b89d-115010fe9491-logs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " 
pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.908549 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-scripts\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.908996 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.909210 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-config-data\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.909840 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.910382 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.910374 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c77eef01-0f3d-4977-b89d-115010fe9491-config-data-custom\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.928693 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rc5h\" (UniqueName: \"kubernetes.io/projected/c77eef01-0f3d-4977-b89d-115010fe9491-kube-api-access-6rc5h\") pod \"cinder-api-0\" (UID: \"c77eef01-0f3d-4977-b89d-115010fe9491\") " pod="openstack/cinder-api-0" Dec 03 18:08:06 crc kubenswrapper[5002]: I1203 18:08:06.978511 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 18:08:07 crc kubenswrapper[5002]: I1203 18:08:07.407800 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 18:08:07 crc kubenswrapper[5002]: I1203 18:08:07.546154 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c77eef01-0f3d-4977-b89d-115010fe9491","Type":"ContainerStarted","Data":"9fc3da9c77f84a6d8944d1846084434555fc82acc8dbd110e08e4c85f1042179"} Dec 03 18:08:08 crc kubenswrapper[5002]: I1203 18:08:08.559271 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c77eef01-0f3d-4977-b89d-115010fe9491","Type":"ContainerStarted","Data":"9867a5497f12d69cfed7364cd51057b23fdfd372a84aab6e321d6477499abb28"} Dec 03 18:08:08 crc kubenswrapper[5002]: I1203 18:08:08.559662 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 18:08:08 crc kubenswrapper[5002]: I1203 18:08:08.559674 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c77eef01-0f3d-4977-b89d-115010fe9491","Type":"ContainerStarted","Data":"bc181193a3ff3fe01f8f8ea7e39401b93165e346115f26bcd2942db5067e67f3"} Dec 03 18:08:11 crc kubenswrapper[5002]: I1203 18:08:11.473141 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 18:08:11 crc kubenswrapper[5002]: I1203 18:08:11.497851 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.497830554 podStartE2EDuration="5.497830554s" podCreationTimestamp="2025-12-03 18:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:08.5797071 +0000 UTC m=+5811.993528998" watchObservedRunningTime="2025-12-03 18:08:11.497830554 +0000 UTC m=+5814.911652442" Dec 03 18:08:11 crc kubenswrapper[5002]: I1203 18:08:11.525739 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:11 crc kubenswrapper[5002]: I1203 18:08:11.586983 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="cinder-scheduler" containerID="cri-o://ad3dba66b80660571a2ba6156fd684aacf4627d853e2c31fc998dcd552c8f650" gracePeriod=30 Dec 03 18:08:11 crc kubenswrapper[5002]: I1203 18:08:11.587023 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="probe" containerID="cri-o://d5015a0b346d0ad972284510b59bc16221c7557964b3b30fb56210d1bbbbcb79" gracePeriod=30 Dec 03 18:08:12 crc kubenswrapper[5002]: I1203 18:08:12.597603 5002 generic.go:334] "Generic (PLEG): container finished" podID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerID="d5015a0b346d0ad972284510b59bc16221c7557964b3b30fb56210d1bbbbcb79" exitCode=0 Dec 03 18:08:12 crc kubenswrapper[5002]: I1203 18:08:12.597681 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"707634ac-d0d0-4cfc-81d8-427fd4eaaf91","Type":"ContainerDied","Data":"d5015a0b346d0ad972284510b59bc16221c7557964b3b30fb56210d1bbbbcb79"} Dec 03 18:08:13 crc kubenswrapper[5002]: I1203 18:08:13.616308 5002 generic.go:334] "Generic (PLEG): container finished" 
podID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerID="ad3dba66b80660571a2ba6156fd684aacf4627d853e2c31fc998dcd552c8f650" exitCode=0 Dec 03 18:08:13 crc kubenswrapper[5002]: I1203 18:08:13.616353 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"707634ac-d0d0-4cfc-81d8-427fd4eaaf91","Type":"ContainerDied","Data":"ad3dba66b80660571a2ba6156fd684aacf4627d853e2c31fc998dcd552c8f650"} Dec 03 18:08:13 crc kubenswrapper[5002]: I1203 18:08:13.881452 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.030777 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-etc-machine-id\") pod \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.030942 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "707634ac-d0d0-4cfc-81d8-427fd4eaaf91" (UID: "707634ac-d0d0-4cfc-81d8-427fd4eaaf91"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.030988 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlq5w\" (UniqueName: \"kubernetes.io/projected/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-kube-api-access-qlq5w\") pod \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.031913 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data-custom\") pod \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.032043 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-combined-ca-bundle\") pod \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.032145 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-scripts\") pod \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.032177 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data\") pod \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\" (UID: \"707634ac-d0d0-4cfc-81d8-427fd4eaaf91\") " Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.033053 5002 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.038496 5002 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "707634ac-d0d0-4cfc-81d8-427fd4eaaf91" (UID: "707634ac-d0d0-4cfc-81d8-427fd4eaaf91"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.038515 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-scripts" (OuterVolumeSpecName: "scripts") pod "707634ac-d0d0-4cfc-81d8-427fd4eaaf91" (UID: "707634ac-d0d0-4cfc-81d8-427fd4eaaf91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.038646 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-kube-api-access-qlq5w" (OuterVolumeSpecName: "kube-api-access-qlq5w") pod "707634ac-d0d0-4cfc-81d8-427fd4eaaf91" (UID: "707634ac-d0d0-4cfc-81d8-427fd4eaaf91"). InnerVolumeSpecName "kube-api-access-qlq5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.083961 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "707634ac-d0d0-4cfc-81d8-427fd4eaaf91" (UID: "707634ac-d0d0-4cfc-81d8-427fd4eaaf91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.127904 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data" (OuterVolumeSpecName: "config-data") pod "707634ac-d0d0-4cfc-81d8-427fd4eaaf91" (UID: "707634ac-d0d0-4cfc-81d8-427fd4eaaf91"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.134356 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlq5w\" (UniqueName: \"kubernetes.io/projected/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-kube-api-access-qlq5w\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.134387 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.134397 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.134405 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.134415 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707634ac-d0d0-4cfc-81d8-427fd4eaaf91-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.628955 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"707634ac-d0d0-4cfc-81d8-427fd4eaaf91","Type":"ContainerDied","Data":"af34f20aafa8c2bb68036f156aac2496cdd8678b5b84be972b6b5b7ba89ace29"} Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.629005 5002 scope.go:117] "RemoveContainer" containerID="d5015a0b346d0ad972284510b59bc16221c7557964b3b30fb56210d1bbbbcb79" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.629124 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.661152 5002 scope.go:117] "RemoveContainer" containerID="ad3dba66b80660571a2ba6156fd684aacf4627d853e2c31fc998dcd552c8f650" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.662416 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.676044 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.693833 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:14 crc kubenswrapper[5002]: E1203 18:08:14.694168 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="probe" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.694184 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="probe" Dec 03 18:08:14 crc kubenswrapper[5002]: E1203 18:08:14.694194 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="cinder-scheduler" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.694200 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="cinder-scheduler" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.694361 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="cinder-scheduler" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.694382 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" containerName="probe" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.695314 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.700341 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.707763 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.846166 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v8gd\" (UniqueName: \"kubernetes.io/projected/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-kube-api-access-7v8gd\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.846217 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.846244 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-scripts\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.846259 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-config-data\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.846398 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.846450 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.854088 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="707634ac-d0d0-4cfc-81d8-427fd4eaaf91" path="/var/lib/kubelet/pods/707634ac-d0d0-4cfc-81d8-427fd4eaaf91/volumes" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949010 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v8gd\" (UniqueName: \"kubernetes.io/projected/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-kube-api-access-7v8gd\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949084 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949205 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-scripts\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949230 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-config-data\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949293 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.949501 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.953546 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.953725 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-config-data\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.954237 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-scripts\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.955498 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:14 crc kubenswrapper[5002]: I1203 18:08:14.971421 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7v8gd\" (UniqueName: \"kubernetes.io/projected/72b3be30-cf6e-4948-8c2b-6ab8e4144f06-kube-api-access-7v8gd\") pod \"cinder-scheduler-0\" (UID: \"72b3be30-cf6e-4948-8c2b-6ab8e4144f06\") " pod="openstack/cinder-scheduler-0" Dec 03 18:08:15 crc kubenswrapper[5002]: I1203 18:08:15.014560 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 18:08:15 crc kubenswrapper[5002]: I1203 18:08:15.477077 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 18:08:15 crc kubenswrapper[5002]: I1203 18:08:15.641154 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"72b3be30-cf6e-4948-8c2b-6ab8e4144f06","Type":"ContainerStarted","Data":"993d1cee1e846fd7d8dd6952fe62a964502509dd7ca0805a30ea22b5facced12"} Dec 03 18:08:16 crc kubenswrapper[5002]: I1203 18:08:16.653858 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"72b3be30-cf6e-4948-8c2b-6ab8e4144f06","Type":"ContainerStarted","Data":"98972f6a246854dafc5808c93d0aabc02193d83f5bf15245a7a4eca1caea904e"} Dec 03 18:08:17 crc kubenswrapper[5002]: I1203 18:08:17.667972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"72b3be30-cf6e-4948-8c2b-6ab8e4144f06","Type":"ContainerStarted","Data":"9db14725c9da3335bcf9aca2e63b6892c38a56b9b79ebed25aaf86ebca008cf2"} Dec 03 18:08:17 crc kubenswrapper[5002]: I1203 18:08:17.692760 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.692712394 podStartE2EDuration="3.692712394s" podCreationTimestamp="2025-12-03 18:08:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:17.686328062 +0000 UTC m=+5821.100149950" watchObservedRunningTime="2025-12-03 18:08:17.692712394 +0000 UTC m=+5821.106534282" Dec 03 18:08:18 crc kubenswrapper[5002]: I1203 18:08:18.731403 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 18:08:20 crc kubenswrapper[5002]: I1203 18:08:20.014638 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 18:08:25 crc kubenswrapper[5002]: I1203 18:08:25.222244 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.856407 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-8tvgf"] Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.860406 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.866072 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8tvgf"] Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.964682 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-a01f-account-create-update-94srm"] Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.965853 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.967905 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 03 18:08:27 crc kubenswrapper[5002]: I1203 18:08:27.974273 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a01f-account-create-update-94srm"] Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.028339 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmk7s\" (UniqueName: \"kubernetes.io/projected/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-kube-api-access-jmk7s\") pod \"glance-db-create-8tvgf\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.028844 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-operator-scripts\") pod \"glance-db-create-8tvgf\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.130050 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hng9\" (UniqueName: \"kubernetes.io/projected/ef699e44-5aa7-4513-bb2e-e51999ad601f-kube-api-access-5hng9\") pod \"glance-a01f-account-create-update-94srm\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.130301 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-operator-scripts\") pod \"glance-db-create-8tvgf\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.130357 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef699e44-5aa7-4513-bb2e-e51999ad601f-operator-scripts\") pod \"glance-a01f-account-create-update-94srm\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.130675 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmk7s\" (UniqueName: \"kubernetes.io/projected/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-kube-api-access-jmk7s\") pod \"glance-db-create-8tvgf\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.131032 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-operator-scripts\") pod \"glance-db-create-8tvgf\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.153860 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmk7s\" (UniqueName: \"kubernetes.io/projected/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-kube-api-access-jmk7s\") pod \"glance-db-create-8tvgf\" (UID: 
\"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.188822 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.232037 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hng9\" (UniqueName: \"kubernetes.io/projected/ef699e44-5aa7-4513-bb2e-e51999ad601f-kube-api-access-5hng9\") pod \"glance-a01f-account-create-update-94srm\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.232130 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef699e44-5aa7-4513-bb2e-e51999ad601f-operator-scripts\") pod \"glance-a01f-account-create-update-94srm\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.233018 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef699e44-5aa7-4513-bb2e-e51999ad601f-operator-scripts\") pod \"glance-a01f-account-create-update-94srm\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.253795 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hng9\" (UniqueName: \"kubernetes.io/projected/ef699e44-5aa7-4513-bb2e-e51999ad601f-kube-api-access-5hng9\") pod \"glance-a01f-account-create-update-94srm\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.303845 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:28 crc kubenswrapper[5002]: W1203 18:08:28.671704 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ee68fc3_1a09_4d84_a183_d4d78f3f9006.slice/crio-3a2ead0aebfd573bec5fd068e8976d5257c57a332f704820bc2b1553cc680365 WatchSource:0}: Error finding container 3a2ead0aebfd573bec5fd068e8976d5257c57a332f704820bc2b1553cc680365: Status 404 returned error can't find the container with id 3a2ead0aebfd573bec5fd068e8976d5257c57a332f704820bc2b1553cc680365 Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.677803 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8tvgf"] Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.768625 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8tvgf" event={"ID":"9ee68fc3-1a09-4d84-a183-d4d78f3f9006","Type":"ContainerStarted","Data":"3a2ead0aebfd573bec5fd068e8976d5257c57a332f704820bc2b1553cc680365"} Dec 03 18:08:28 crc kubenswrapper[5002]: I1203 18:08:28.791728 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a01f-account-create-update-94srm"] Dec 03 18:08:28 crc kubenswrapper[5002]: W1203 18:08:28.799412 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef699e44_5aa7_4513_bb2e_e51999ad601f.slice/crio-d39952ff5d5dac11252128edba9dbbb9e11510f609c8f22c2f3104437de2cffc WatchSource:0}: Error finding container d39952ff5d5dac11252128edba9dbbb9e11510f609c8f22c2f3104437de2cffc: Status 404 returned error can't find the container with id d39952ff5d5dac11252128edba9dbbb9e11510f609c8f22c2f3104437de2cffc Dec 03 18:08:29 crc kubenswrapper[5002]: I1203 18:08:29.777773 5002 generic.go:334] "Generic (PLEG): container finished" podID="9ee68fc3-1a09-4d84-a183-d4d78f3f9006" containerID="a56ff886d8d328a3cec1e3e7469a99b8ec11cb82fa52e58da8d81e88ca231e97" exitCode=0 Dec 03 18:08:29 crc kubenswrapper[5002]: I1203 18:08:29.777875 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8tvgf" event={"ID":"9ee68fc3-1a09-4d84-a183-d4d78f3f9006","Type":"ContainerDied","Data":"a56ff886d8d328a3cec1e3e7469a99b8ec11cb82fa52e58da8d81e88ca231e97"} Dec 03 18:08:29 crc kubenswrapper[5002]: I1203 18:08:29.781459 5002 generic.go:334] "Generic (PLEG): container finished" podID="ef699e44-5aa7-4513-bb2e-e51999ad601f" containerID="f1b46b627f9ef8a4cf57a6af9970080b8844b03bde954836ffa4881cb22e98c2" exitCode=0 Dec 03 18:08:29 crc kubenswrapper[5002]: I1203 18:08:29.781495 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a01f-account-create-update-94srm" event={"ID":"ef699e44-5aa7-4513-bb2e-e51999ad601f","Type":"ContainerDied","Data":"f1b46b627f9ef8a4cf57a6af9970080b8844b03bde954836ffa4881cb22e98c2"} Dec 03 18:08:29 crc kubenswrapper[5002]: I1203 18:08:29.781520 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a01f-account-create-update-94srm" event={"ID":"ef699e44-5aa7-4513-bb2e-e51999ad601f","Type":"ContainerStarted","Data":"d39952ff5d5dac11252128edba9dbbb9e11510f609c8f22c2f3104437de2cffc"} Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.249088 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.391702 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmk7s\" (UniqueName: \"kubernetes.io/projected/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-kube-api-access-jmk7s\") pod \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.391928 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-operator-scripts\") pod \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\" (UID: \"9ee68fc3-1a09-4d84-a183-d4d78f3f9006\") " Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.392954 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9ee68fc3-1a09-4d84-a183-d4d78f3f9006" (UID: "9ee68fc3-1a09-4d84-a183-d4d78f3f9006"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.397011 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-kube-api-access-jmk7s" (OuterVolumeSpecName: "kube-api-access-jmk7s") pod "9ee68fc3-1a09-4d84-a183-d4d78f3f9006" (UID: "9ee68fc3-1a09-4d84-a183-d4d78f3f9006"). InnerVolumeSpecName "kube-api-access-jmk7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.494300 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.494324 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmk7s\" (UniqueName: \"kubernetes.io/projected/9ee68fc3-1a09-4d84-a183-d4d78f3f9006-kube-api-access-jmk7s\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.805932 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8tvgf" event={"ID":"9ee68fc3-1a09-4d84-a183-d4d78f3f9006","Type":"ContainerDied","Data":"3a2ead0aebfd573bec5fd068e8976d5257c57a332f704820bc2b1553cc680365"} Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.805968 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a2ead0aebfd573bec5fd068e8976d5257c57a332f704820bc2b1553cc680365" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.806022 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8tvgf" Dec 03 18:08:31 crc kubenswrapper[5002]: I1203 18:08:31.947300 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.006602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hng9\" (UniqueName: \"kubernetes.io/projected/ef699e44-5aa7-4513-bb2e-e51999ad601f-kube-api-access-5hng9\") pod \"ef699e44-5aa7-4513-bb2e-e51999ad601f\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.006669 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef699e44-5aa7-4513-bb2e-e51999ad601f-operator-scripts\") pod \"ef699e44-5aa7-4513-bb2e-e51999ad601f\" (UID: \"ef699e44-5aa7-4513-bb2e-e51999ad601f\") " Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.007395 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef699e44-5aa7-4513-bb2e-e51999ad601f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ef699e44-5aa7-4513-bb2e-e51999ad601f" (UID: "ef699e44-5aa7-4513-bb2e-e51999ad601f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.010491 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef699e44-5aa7-4513-bb2e-e51999ad601f-kube-api-access-5hng9" (OuterVolumeSpecName: "kube-api-access-5hng9") pod "ef699e44-5aa7-4513-bb2e-e51999ad601f" (UID: "ef699e44-5aa7-4513-bb2e-e51999ad601f"). InnerVolumeSpecName "kube-api-access-5hng9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.108371 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hng9\" (UniqueName: \"kubernetes.io/projected/ef699e44-5aa7-4513-bb2e-e51999ad601f-kube-api-access-5hng9\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.108404 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef699e44-5aa7-4513-bb2e-e51999ad601f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.821461 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a01f-account-create-update-94srm" event={"ID":"ef699e44-5aa7-4513-bb2e-e51999ad601f","Type":"ContainerDied","Data":"d39952ff5d5dac11252128edba9dbbb9e11510f609c8f22c2f3104437de2cffc"} Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.821740 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d39952ff5d5dac11252128edba9dbbb9e11510f609c8f22c2f3104437de2cffc" Dec 03 18:08:32 crc kubenswrapper[5002]: I1203 18:08:32.821509 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a01f-account-create-update-94srm" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.221821 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-9wvmr"] Dec 03 18:08:38 crc kubenswrapper[5002]: E1203 18:08:38.223421 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ee68fc3-1a09-4d84-a183-d4d78f3f9006" containerName="mariadb-database-create" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.223454 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ee68fc3-1a09-4d84-a183-d4d78f3f9006" containerName="mariadb-database-create" Dec 03 18:08:38 crc kubenswrapper[5002]: E1203 18:08:38.223520 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef699e44-5aa7-4513-bb2e-e51999ad601f" containerName="mariadb-account-create-update" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.223540 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef699e44-5aa7-4513-bb2e-e51999ad601f" containerName="mariadb-account-create-update" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.223995 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef699e44-5aa7-4513-bb2e-e51999ad601f" containerName="mariadb-account-create-update" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.224062 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ee68fc3-1a09-4d84-a183-d4d78f3f9006" containerName="mariadb-database-create" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.225353 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.228014 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.231604 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-xzqdv" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.233616 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9wvmr"] Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.333028 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcgwq\" (UniqueName: \"kubernetes.io/projected/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-kube-api-access-xcgwq\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.333080 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-db-sync-config-data\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.333145 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-config-data\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.333230 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-combined-ca-bundle\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.435939 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcgwq\" (UniqueName: \"kubernetes.io/projected/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-kube-api-access-xcgwq\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.436005 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-db-sync-config-data\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.436117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-config-data\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.436212 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-combined-ca-bundle\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.442386 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-config-data\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.446046 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-db-sync-config-data\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.453297 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-combined-ca-bundle\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.457599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcgwq\" (UniqueName: \"kubernetes.io/projected/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-kube-api-access-xcgwq\") pod \"glance-db-sync-9wvmr\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:38 crc kubenswrapper[5002]: I1203 18:08:38.553404 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:39 crc kubenswrapper[5002]: I1203 18:08:39.091557 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9wvmr"] Dec 03 18:08:39 crc kubenswrapper[5002]: W1203 18:08:39.101357 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b290899_0d62_4fab_9d24_1b3f36e7d2a4.slice/crio-3d8e5d24b4bd25edf06da487d8ab92c037b8c7b7ae013a9df0bdf282479e0882 WatchSource:0}: Error finding container 3d8e5d24b4bd25edf06da487d8ab92c037b8c7b7ae013a9df0bdf282479e0882: Status 404 returned error can't find the container with id 3d8e5d24b4bd25edf06da487d8ab92c037b8c7b7ae013a9df0bdf282479e0882 Dec 03 18:08:39 crc kubenswrapper[5002]: I1203 18:08:39.912728 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9wvmr" event={"ID":"6b290899-0d62-4fab-9d24-1b3f36e7d2a4","Type":"ContainerStarted","Data":"ac826346c2ba7d9ab474e64fabfe0891a737be696046cda133aeb7513565f1a5"} Dec 03 18:08:39 crc kubenswrapper[5002]: I1203 18:08:39.913103 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9wvmr" event={"ID":"6b290899-0d62-4fab-9d24-1b3f36e7d2a4","Type":"ContainerStarted","Data":"3d8e5d24b4bd25edf06da487d8ab92c037b8c7b7ae013a9df0bdf282479e0882"} Dec 03 18:08:39 crc kubenswrapper[5002]: I1203 18:08:39.932173 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-9wvmr" podStartSLOduration=1.932153904 podStartE2EDuration="1.932153904s" podCreationTimestamp="2025-12-03 18:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:39.930737336 +0000 UTC m=+5843.344559234" watchObservedRunningTime="2025-12-03 18:08:39.932153904 +0000 UTC m=+5843.345975792" Dec 03 18:08:42 crc kubenswrapper[5002]: I1203 18:08:42.350489 5002 scope.go:117] "RemoveContainer" containerID="83a72ee16860cd7b55bac1f8a770a883b63abdbef6e3f35d67f37aaf1862beab" Dec 03 18:08:42 crc kubenswrapper[5002]: I1203 18:08:42.373440 5002 scope.go:117] "RemoveContainer" containerID="19093cbffd37b47fb097bf5af5c5fd133eef1f8378bacd19f97a5bf125abaf1e" Dec 03 18:08:42 crc kubenswrapper[5002]: I1203 18:08:42.942744 5002 generic.go:334] "Generic (PLEG): container finished" podID="6b290899-0d62-4fab-9d24-1b3f36e7d2a4" containerID="ac826346c2ba7d9ab474e64fabfe0891a737be696046cda133aeb7513565f1a5" exitCode=0 Dec 03 18:08:42 crc kubenswrapper[5002]: I1203 18:08:42.942947 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9wvmr" event={"ID":"6b290899-0d62-4fab-9d24-1b3f36e7d2a4","Type":"ContainerDied","Data":"ac826346c2ba7d9ab474e64fabfe0891a737be696046cda133aeb7513565f1a5"} Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.364237 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.442008 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-db-sync-config-data\") pod \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.442160 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-combined-ca-bundle\") pod \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.442207 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwq\" (UniqueName: \"kubernetes.io/projected/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-kube-api-access-xcgwq\") pod \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.442292 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-config-data\") pod \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\" (UID: \"6b290899-0d62-4fab-9d24-1b3f36e7d2a4\") " Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.447981 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-kube-api-access-xcgwq" (OuterVolumeSpecName: "kube-api-access-xcgwq") pod "6b290899-0d62-4fab-9d24-1b3f36e7d2a4" (UID: "6b290899-0d62-4fab-9d24-1b3f36e7d2a4"). InnerVolumeSpecName "kube-api-access-xcgwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.448103 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6b290899-0d62-4fab-9d24-1b3f36e7d2a4" (UID: "6b290899-0d62-4fab-9d24-1b3f36e7d2a4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.466687 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b290899-0d62-4fab-9d24-1b3f36e7d2a4" (UID: "6b290899-0d62-4fab-9d24-1b3f36e7d2a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.507975 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-config-data" (OuterVolumeSpecName: "config-data") pod "6b290899-0d62-4fab-9d24-1b3f36e7d2a4" (UID: "6b290899-0d62-4fab-9d24-1b3f36e7d2a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.544701 5002 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.544761 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.544772 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwq\" (UniqueName: \"kubernetes.io/projected/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-kube-api-access-xcgwq\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.544783 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b290899-0d62-4fab-9d24-1b3f36e7d2a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.966971 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9wvmr" event={"ID":"6b290899-0d62-4fab-9d24-1b3f36e7d2a4","Type":"ContainerDied","Data":"3d8e5d24b4bd25edf06da487d8ab92c037b8c7b7ae013a9df0bdf282479e0882"} Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.967019 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d8e5d24b4bd25edf06da487d8ab92c037b8c7b7ae013a9df0bdf282479e0882" Dec 03 18:08:44 crc kubenswrapper[5002]: I1203 18:08:44.967057 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9wvmr" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.226005 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:45 crc kubenswrapper[5002]: E1203 18:08:45.226829 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b290899-0d62-4fab-9d24-1b3f36e7d2a4" containerName="glance-db-sync" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.226844 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b290899-0d62-4fab-9d24-1b3f36e7d2a4" containerName="glance-db-sync" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.227026 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b290899-0d62-4fab-9d24-1b3f36e7d2a4" containerName="glance-db-sync" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.232625 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.236810 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.238681 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-xzqdv" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.239559 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.244771 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.254456 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.254529 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx48t\" (UniqueName: \"kubernetes.io/projected/7e11163c-3a11-4cef-bee9-b34d33d4b917-kube-api-access-cx48t\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.254554 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-scripts\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.254685 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.254773 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-config-data\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.254852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-logs\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.356469 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fb5779dc-ltkm4"] Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357062 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx48t\" (UniqueName: \"kubernetes.io/projected/7e11163c-3a11-4cef-bee9-b34d33d4b917-kube-api-access-cx48t\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357192 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-scripts\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357212 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357529 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357876 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-config-data\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.357906 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-logs\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.358214 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-logs\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.362913 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.371989 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.372004 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-scripts\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.372905 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-config-data\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.375897 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fb5779dc-ltkm4"] Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.402931 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx48t\" (UniqueName: \"kubernetes.io/projected/7e11163c-3a11-4cef-bee9-b34d33d4b917-kube-api-access-cx48t\") pod \"glance-default-external-api-0\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.439006 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.440817 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.448240 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.459778 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-dns-svc\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.459982 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-sb\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460065 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnnw7\" (UniqueName: \"kubernetes.io/projected/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-kube-api-access-qnnw7\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460183 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-config\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460243 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65w6j\" (UniqueName: \"kubernetes.io/projected/91b03059-5955-49e0-82d1-29051d784641-kube-api-access-65w6j\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460354 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460479 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-logs\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460553 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 
18:08:45.460649 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460730 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.460865 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.467580 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.549643 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.561958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562014 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562070 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562095 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-dns-svc\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562111 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-sb\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562143 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qnnw7\" (UniqueName: \"kubernetes.io/projected/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-kube-api-access-qnnw7\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562177 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-config\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562196 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65w6j\" (UniqueName: \"kubernetes.io/projected/91b03059-5955-49e0-82d1-29051d784641-kube-api-access-65w6j\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562211 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562236 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-logs\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.562261 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.563148 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.563516 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-sb\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.563521 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-dns-svc\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.563151 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-config\") pod 
\"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.563888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-logs\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.564232 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.567316 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.586645 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.587524 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.595958 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnnw7\" (UniqueName: \"kubernetes.io/projected/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-kube-api-access-qnnw7\") pod \"glance-default-internal-api-0\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.600490 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65w6j\" (UniqueName: \"kubernetes.io/projected/91b03059-5955-49e0-82d1-29051d784641-kube-api-access-65w6j\") pod \"dnsmasq-dns-6fb5779dc-ltkm4\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.779350 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:45 crc kubenswrapper[5002]: I1203 18:08:45.791566 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.216023 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:46 crc kubenswrapper[5002]: W1203 18:08:46.229589 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e11163c_3a11_4cef_bee9_b34d33d4b917.slice/crio-0501ca4a05bd7fa2a3626a86af5616b6b3f9fd50b483464913b9bc6e180a35a7 WatchSource:0}: Error finding container 0501ca4a05bd7fa2a3626a86af5616b6b3f9fd50b483464913b9bc6e180a35a7: Status 404 returned error can't find the container with id 0501ca4a05bd7fa2a3626a86af5616b6b3f9fd50b483464913b9bc6e180a35a7 Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.306826 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fb5779dc-ltkm4"] Dec 03 18:08:46 crc kubenswrapper[5002]: W1203 18:08:46.315970 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91b03059_5955_49e0_82d1_29051d784641.slice/crio-f95793bf57657f1a8918f7c6572def489858b6e277abe6d3c3a63c9a8a88309e WatchSource:0}: Error finding container f95793bf57657f1a8918f7c6572def489858b6e277abe6d3c3a63c9a8a88309e: Status 404 returned error can't find the container with id f95793bf57657f1a8918f7c6572def489858b6e277abe6d3c3a63c9a8a88309e Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.380195 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:46 crc kubenswrapper[5002]: W1203 18:08:46.393148 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75e736fe_71c0_4ef0_84b9_cbd1b05faca8.slice/crio-c4a05f66f56127da4b2d70260bbad8ca5bc2825a6ce991ad7aa23735c8bc1dbc WatchSource:0}: Error finding container c4a05f66f56127da4b2d70260bbad8ca5bc2825a6ce991ad7aa23735c8bc1dbc: Status 404 returned error can't find the container with id c4a05f66f56127da4b2d70260bbad8ca5bc2825a6ce991ad7aa23735c8bc1dbc Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.593657 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.986613 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"75e736fe-71c0-4ef0-84b9-cbd1b05faca8","Type":"ContainerStarted","Data":"c4a05f66f56127da4b2d70260bbad8ca5bc2825a6ce991ad7aa23735c8bc1dbc"} Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.988253 5002 generic.go:334] "Generic (PLEG): container finished" podID="91b03059-5955-49e0-82d1-29051d784641" containerID="af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64" exitCode=0 Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.988303 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" event={"ID":"91b03059-5955-49e0-82d1-29051d784641","Type":"ContainerDied","Data":"af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64"} Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.988318 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" event={"ID":"91b03059-5955-49e0-82d1-29051d784641","Type":"ContainerStarted","Data":"f95793bf57657f1a8918f7c6572def489858b6e277abe6d3c3a63c9a8a88309e"} Dec 
03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.996808 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7e11163c-3a11-4cef-bee9-b34d33d4b917","Type":"ContainerStarted","Data":"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f"} Dec 03 18:08:46 crc kubenswrapper[5002]: I1203 18:08:46.996863 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7e11163c-3a11-4cef-bee9-b34d33d4b917","Type":"ContainerStarted","Data":"0501ca4a05bd7fa2a3626a86af5616b6b3f9fd50b483464913b9bc6e180a35a7"} Dec 03 18:08:47 crc kubenswrapper[5002]: I1203 18:08:47.963870 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.019832 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" event={"ID":"91b03059-5955-49e0-82d1-29051d784641","Type":"ContainerStarted","Data":"f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5"} Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.020925 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.025653 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7e11163c-3a11-4cef-bee9-b34d33d4b917","Type":"ContainerStarted","Data":"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28"} Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.025824 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-log" containerID="cri-o://23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f" gracePeriod=30 Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.026065 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-httpd" containerID="cri-o://c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28" gracePeriod=30 Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.029918 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"75e736fe-71c0-4ef0-84b9-cbd1b05faca8","Type":"ContainerStarted","Data":"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad"} Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.029952 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"75e736fe-71c0-4ef0-84b9-cbd1b05faca8","Type":"ContainerStarted","Data":"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070"} Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.044043 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" podStartSLOduration=3.044024562 podStartE2EDuration="3.044024562s" podCreationTimestamp="2025-12-03 18:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:48.041779112 +0000 UTC m=+5851.455601020" watchObservedRunningTime="2025-12-03 18:08:48.044024562 +0000 UTC m=+5851.457846450" Dec 03 18:08:48 crc 
kubenswrapper[5002]: I1203 18:08:48.071286 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.071259605 podStartE2EDuration="3.071259605s" podCreationTimestamp="2025-12-03 18:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:48.062768567 +0000 UTC m=+5851.476590455" watchObservedRunningTime="2025-12-03 18:08:48.071259605 +0000 UTC m=+5851.485081513" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.086306 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.086285649 podStartE2EDuration="3.086285649s" podCreationTimestamp="2025-12-03 18:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:48.081960732 +0000 UTC m=+5851.495782640" watchObservedRunningTime="2025-12-03 18:08:48.086285649 +0000 UTC m=+5851.500107537" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.682909 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.857356 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx48t\" (UniqueName: \"kubernetes.io/projected/7e11163c-3a11-4cef-bee9-b34d33d4b917-kube-api-access-cx48t\") pod \"7e11163c-3a11-4cef-bee9-b34d33d4b917\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.857447 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-config-data\") pod \"7e11163c-3a11-4cef-bee9-b34d33d4b917\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.857501 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-scripts\") pod \"7e11163c-3a11-4cef-bee9-b34d33d4b917\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.857547 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-httpd-run\") pod \"7e11163c-3a11-4cef-bee9-b34d33d4b917\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.857594 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-combined-ca-bundle\") pod \"7e11163c-3a11-4cef-bee9-b34d33d4b917\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.857621 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-logs\") pod \"7e11163c-3a11-4cef-bee9-b34d33d4b917\" (UID: \"7e11163c-3a11-4cef-bee9-b34d33d4b917\") " Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.858036 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-logs" (OuterVolumeSpecName: "logs") pod "7e11163c-3a11-4cef-bee9-b34d33d4b917" (UID: "7e11163c-3a11-4cef-bee9-b34d33d4b917"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.858061 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7e11163c-3a11-4cef-bee9-b34d33d4b917" (UID: "7e11163c-3a11-4cef-bee9-b34d33d4b917"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.858507 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.858531 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e11163c-3a11-4cef-bee9-b34d33d4b917-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.864104 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e11163c-3a11-4cef-bee9-b34d33d4b917-kube-api-access-cx48t" (OuterVolumeSpecName: "kube-api-access-cx48t") pod "7e11163c-3a11-4cef-bee9-b34d33d4b917" (UID: "7e11163c-3a11-4cef-bee9-b34d33d4b917"). InnerVolumeSpecName "kube-api-access-cx48t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.864329 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-scripts" (OuterVolumeSpecName: "scripts") pod "7e11163c-3a11-4cef-bee9-b34d33d4b917" (UID: "7e11163c-3a11-4cef-bee9-b34d33d4b917"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.888956 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e11163c-3a11-4cef-bee9-b34d33d4b917" (UID: "7e11163c-3a11-4cef-bee9-b34d33d4b917"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.916916 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-config-data" (OuterVolumeSpecName: "config-data") pod "7e11163c-3a11-4cef-bee9-b34d33d4b917" (UID: "7e11163c-3a11-4cef-bee9-b34d33d4b917"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.960173 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.960218 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx48t\" (UniqueName: \"kubernetes.io/projected/7e11163c-3a11-4cef-bee9-b34d33d4b917-kube-api-access-cx48t\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.960233 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:48 crc kubenswrapper[5002]: I1203 18:08:48.960244 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e11163c-3a11-4cef-bee9-b34d33d4b917-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.040922 5002 generic.go:334] "Generic (PLEG): container finished" podID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerID="c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28" exitCode=0 Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.040972 5002 generic.go:334] "Generic (PLEG): container finished" podID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerID="23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f" exitCode=143 Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041341 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7e11163c-3a11-4cef-bee9-b34d33d4b917","Type":"ContainerDied","Data":"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28"} Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041370 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7e11163c-3a11-4cef-bee9-b34d33d4b917","Type":"ContainerDied","Data":"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f"} Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041383 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7e11163c-3a11-4cef-bee9-b34d33d4b917","Type":"ContainerDied","Data":"0501ca4a05bd7fa2a3626a86af5616b6b3f9fd50b483464913b9bc6e180a35a7"} Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041398 5002 scope.go:117] "RemoveContainer" containerID="c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041548 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041678 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-log" containerID="cri-o://5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070" gracePeriod=30 Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.041847 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-httpd" containerID="cri-o://f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad" gracePeriod=30 Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.081830 5002 scope.go:117] "RemoveContainer" containerID="23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.083967 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.106907 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.110697 5002 scope.go:117] "RemoveContainer" containerID="c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28" Dec 03 18:08:49 crc kubenswrapper[5002]: E1203 18:08:49.111199 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28\": container with ID starting with c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28 not found: ID does not exist" containerID="c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.111226 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28"} err="failed to get container status \"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28\": rpc error: code = NotFound desc = could not find container \"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28\": container with ID starting with c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28 not found: ID does not exist" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.111274 5002 scope.go:117] "RemoveContainer" containerID="23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f" Dec 03 18:08:49 crc kubenswrapper[5002]: E1203 18:08:49.115306 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f\": container with ID starting with 23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f not found: ID does not exist" containerID="23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.115356 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f"} err="failed to get container status \"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f\": rpc error: code = NotFound desc = 
could not find container \"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f\": container with ID starting with 23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f not found: ID does not exist" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.115384 5002 scope.go:117] "RemoveContainer" containerID="c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.115675 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28"} err="failed to get container status \"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28\": rpc error: code = NotFound desc = could not find container \"c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28\": container with ID starting with c0897ba8993588e27e35e758b32a9e1123899801ec067a720e8fcc49852a4d28 not found: ID does not exist" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.115694 5002 scope.go:117] "RemoveContainer" containerID="23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.115871 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:49 crc kubenswrapper[5002]: E1203 18:08:49.116264 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-log" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.116280 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-log" Dec 03 18:08:49 crc kubenswrapper[5002]: E1203 18:08:49.116303 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-httpd" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.116310 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-httpd" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.116490 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-httpd" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.116511 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" containerName="glance-log" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.117564 5002 util.go:30] "No sandbox for pod can be found. 
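The NotFound errors above are the benign race visible throughout this section: by the time the kubelet asks the runtime for a container's status during removal, CRI-O has already deleted the container, so the status RPC fails and the kubelet logs the error and moves on. A generic sketch of that idempotent-cleanup pattern using the `grpc` package; `remove_container` is a hypothetical stand-in for a CRI-style delete RPC, not the kubelet's actual code path:

```python
import grpc

# Sketch of the pattern visible above: a delete races with an
# already-removed container, the runtime answers gRPC NotFound, and the
# caller treats that as "already done" rather than a failure.
def remove_idempotent(remove_container, container_id: str) -> None:
    try:
        remove_container(container_id)
    except grpc.RpcError as err:
        if err.code() == grpc.StatusCode.NOT_FOUND:
            return  # already gone; nothing left to clean up
        raise
```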
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.118417 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f"} err="failed to get container status \"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f\": rpc error: code = NotFound desc = could not find container \"23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f\": container with ID starting with 23d92a7724e1aac178cc39b741d30ff826273a99d03c01e82a47999f7a64f49f not found: ID does not exist" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.124868 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.125053 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.125883 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270543 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270591 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-logs\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270682 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270708 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270724 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " 
pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.270883 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj6qt\" (UniqueName: \"kubernetes.io/projected/0e72f45c-d84e-49b6-bd09-faf99c184f8e-kube-api-access-fj6qt\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.372971 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373021 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373041 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373069 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj6qt\" (UniqueName: \"kubernetes.io/projected/0e72f45c-d84e-49b6-bd09-faf99c184f8e-kube-api-access-fj6qt\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373141 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373160 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373194 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-logs\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.373627 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-logs\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc 
kubenswrapper[5002]: I1203 18:08:49.374259 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.378178 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.378205 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.379060 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.380854 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.394332 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj6qt\" (UniqueName: \"kubernetes.io/projected/0e72f45c-d84e-49b6-bd09-faf99c184f8e-kube-api-access-fj6qt\") pod \"glance-default-external-api-0\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.448119 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.718156 5002 util.go:48] "No ready sandbox for pod can be found. 
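Before mounting the replacement external-api pod's volumes, the kubelet populated informer caches for the Secrets its spec references (`cert-glance-default-public-svc`, `glance-default-external-config-data`). A short sketch, again assuming the `kubernetes` Python client and read access to the openstack namespace, that inspects those same Secret objects by key name only:

```python
from kubernetes import client, config

# Sketch: inspect the Secret objects the informer caches above refer to.
# Names come from the log; read access to the namespace is assumed.
config.load_kube_config()
v1 = client.CoreV1Api()

for name in ("cert-glance-default-public-svc", "glance-default-external-config-data"):
    sec = v1.read_namespaced_secret(name, "openstack")
    print(name, "->", sorted(sec.data or {}))  # key names only; values stay encoded
```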
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.881574 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-httpd-run\") pod \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.881955 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-scripts\") pod \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.881984 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-logs\") pod \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.882042 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnnw7\" (UniqueName: \"kubernetes.io/projected/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-kube-api-access-qnnw7\") pod \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.882066 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-config-data\") pod \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.882096 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "75e736fe-71c0-4ef0-84b9-cbd1b05faca8" (UID: "75e736fe-71c0-4ef0-84b9-cbd1b05faca8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.882135 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-combined-ca-bundle\") pod \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\" (UID: \"75e736fe-71c0-4ef0-84b9-cbd1b05faca8\") " Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.882521 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.882679 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-logs" (OuterVolumeSpecName: "logs") pod "75e736fe-71c0-4ef0-84b9-cbd1b05faca8" (UID: "75e736fe-71c0-4ef0-84b9-cbd1b05faca8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.899385 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-kube-api-access-qnnw7" (OuterVolumeSpecName: "kube-api-access-qnnw7") pod "75e736fe-71c0-4ef0-84b9-cbd1b05faca8" (UID: "75e736fe-71c0-4ef0-84b9-cbd1b05faca8"). InnerVolumeSpecName "kube-api-access-qnnw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.914514 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-scripts" (OuterVolumeSpecName: "scripts") pod "75e736fe-71c0-4ef0-84b9-cbd1b05faca8" (UID: "75e736fe-71c0-4ef0-84b9-cbd1b05faca8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.964179 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-config-data" (OuterVolumeSpecName: "config-data") pod "75e736fe-71c0-4ef0-84b9-cbd1b05faca8" (UID: "75e736fe-71c0-4ef0-84b9-cbd1b05faca8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.975058 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75e736fe-71c0-4ef0-84b9-cbd1b05faca8" (UID: "75e736fe-71c0-4ef0-84b9-cbd1b05faca8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.991515 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.991553 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.991570 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.991584 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnnw7\" (UniqueName: \"kubernetes.io/projected/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-kube-api-access-qnnw7\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.991602 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75e736fe-71c0-4ef0-84b9-cbd1b05faca8-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:49 crc kubenswrapper[5002]: I1203 18:08:49.999003 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.054154 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"0e72f45c-d84e-49b6-bd09-faf99c184f8e","Type":"ContainerStarted","Data":"3ffd1c2803810d3ef6b845fcbe3adb490ac193bed0421173394f891b3e29f05b"} Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.060558 5002 generic.go:334] "Generic (PLEG): container finished" podID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerID="f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad" exitCode=0 Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.060636 5002 generic.go:334] "Generic (PLEG): container finished" podID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerID="5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070" exitCode=143 Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.062057 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.065936 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"75e736fe-71c0-4ef0-84b9-cbd1b05faca8","Type":"ContainerDied","Data":"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad"} Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.065993 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"75e736fe-71c0-4ef0-84b9-cbd1b05faca8","Type":"ContainerDied","Data":"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070"} Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.066008 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"75e736fe-71c0-4ef0-84b9-cbd1b05faca8","Type":"ContainerDied","Data":"c4a05f66f56127da4b2d70260bbad8ca5bc2825a6ce991ad7aa23735c8bc1dbc"} Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.066030 5002 scope.go:117] "RemoveContainer" containerID="f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.104763 5002 scope.go:117] "RemoveContainer" containerID="5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.111837 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.122274 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.133068 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:50 crc kubenswrapper[5002]: E1203 18:08:50.133562 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-log" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.133588 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-log" Dec 03 18:08:50 crc kubenswrapper[5002]: E1203 18:08:50.133626 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-httpd" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.133635 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-httpd" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.133952 5002 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-log" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.133984 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" containerName="glance-httpd" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.134466 5002 scope.go:117] "RemoveContainer" containerID="f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.135226 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: E1203 18:08:50.136092 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad\": container with ID starting with f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad not found: ID does not exist" containerID="f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.136184 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad"} err="failed to get container status \"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad\": rpc error: code = NotFound desc = could not find container \"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad\": container with ID starting with f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad not found: ID does not exist" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.136261 5002 scope.go:117] "RemoveContainer" containerID="5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.137440 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 18:08:50 crc kubenswrapper[5002]: E1203 18:08:50.137474 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070\": container with ID starting with 5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070 not found: ID does not exist" containerID="5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.137610 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070"} err="failed to get container status \"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070\": rpc error: code = NotFound desc = could not find container \"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070\": container with ID starting with 5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070 not found: ID does not exist" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.137632 5002 scope.go:117] "RemoveContainer" containerID="f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.137447 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.137998 5002 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad"} err="failed to get container status \"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad\": rpc error: code = NotFound desc = could not find container \"f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad\": container with ID starting with f99afa24cdc28cc9be8a96f38fcde94844ea2f01e0d6d46cb5f161f125a070ad not found: ID does not exist" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.138071 5002 scope.go:117] "RemoveContainer" containerID="5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.138396 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070"} err="failed to get container status \"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070\": rpc error: code = NotFound desc = could not find container \"5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070\": container with ID starting with 5284726facd08f4660642c74a2fbe7da22fc547e004c0e9050f71850b6883070 not found: ID does not exist" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.140275 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.299121 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.299189 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-logs\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.299239 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.299269 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8w6q\" (UniqueName: \"kubernetes.io/projected/d8902c04-7626-46d3-a514-9e3149ba3020-kube-api-access-r8w6q\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.299359 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 
18:08:50.299461 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.299503 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401521 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401576 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401616 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401640 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-logs\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401671 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401693 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8w6q\" (UniqueName: \"kubernetes.io/projected/d8902c04-7626-46d3-a514-9e3149ba3020-kube-api-access-r8w6q\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.401731 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.402916 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.403036 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-logs\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.415584 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.415688 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.416087 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.416286 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.418569 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8w6q\" (UniqueName: \"kubernetes.io/projected/d8902c04-7626-46d3-a514-9e3149ba3020-kube-api-access-r8w6q\") pod \"glance-default-internal-api-0\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.499524 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.852249 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75e736fe-71c0-4ef0-84b9-cbd1b05faca8" path="/var/lib/kubelet/pods/75e736fe-71c0-4ef0-84b9-cbd1b05faca8/volumes" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.853126 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e11163c-3a11-4cef-bee9-b34d33d4b917" path="/var/lib/kubelet/pods/7e11163c-3a11-4cef-bee9-b34d33d4b917/volumes" Dec 03 18:08:50 crc kubenswrapper[5002]: I1203 18:08:50.999221 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:08:51 crc kubenswrapper[5002]: W1203 18:08:51.010909 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8902c04_7626_46d3_a514_9e3149ba3020.slice/crio-98dc8a2fdb4e69c9ee9dbbf7219be134cca7ba34346cf79ad3b88368966a2bcb WatchSource:0}: Error finding container 98dc8a2fdb4e69c9ee9dbbf7219be134cca7ba34346cf79ad3b88368966a2bcb: Status 404 returned error can't find the container with id 98dc8a2fdb4e69c9ee9dbbf7219be134cca7ba34346cf79ad3b88368966a2bcb Dec 03 18:08:51 crc kubenswrapper[5002]: I1203 18:08:51.074179 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0e72f45c-d84e-49b6-bd09-faf99c184f8e","Type":"ContainerStarted","Data":"229c6cf1fce60475a3ae4f0cd39f36ee195ec22cbee67ff139b8578f04b9fe55"} Dec 03 18:08:51 crc kubenswrapper[5002]: I1203 18:08:51.078874 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8902c04-7626-46d3-a514-9e3149ba3020","Type":"ContainerStarted","Data":"98dc8a2fdb4e69c9ee9dbbf7219be134cca7ba34346cf79ad3b88368966a2bcb"} Dec 03 18:08:52 crc kubenswrapper[5002]: I1203 18:08:52.095463 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8902c04-7626-46d3-a514-9e3149ba3020","Type":"ContainerStarted","Data":"d088a4958ae42d3b7f345a7c1f4652bf3e39a7f295405fb6d700d60379663dba"} Dec 03 18:08:52 crc kubenswrapper[5002]: I1203 18:08:52.096096 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8902c04-7626-46d3-a514-9e3149ba3020","Type":"ContainerStarted","Data":"67c245d3b6da80f64e2dc1080b95102c1606568b44fe7704cc95cc88eb268df0"} Dec 03 18:08:52 crc kubenswrapper[5002]: I1203 18:08:52.099812 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0e72f45c-d84e-49b6-bd09-faf99c184f8e","Type":"ContainerStarted","Data":"66de54d76af31b2913e68e652216415e395a8c22c89e7bb2629c3399cb430057"} Dec 03 18:08:52 crc kubenswrapper[5002]: I1203 18:08:52.129052 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.129023143 podStartE2EDuration="2.129023143s" podCreationTimestamp="2025-12-03 18:08:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:52.117593496 +0000 UTC m=+5855.531415414" watchObservedRunningTime="2025-12-03 18:08:52.129023143 +0000 UTC m=+5855.542845041" Dec 03 18:08:52 crc kubenswrapper[5002]: I1203 18:08:52.147493 5002 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.147468579 podStartE2EDuration="3.147468579s" podCreationTimestamp="2025-12-03 18:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:08:52.143935044 +0000 UTC m=+5855.557756942" watchObservedRunningTime="2025-12-03 18:08:52.147468579 +0000 UTC m=+5855.561290507" Dec 03 18:08:55 crc kubenswrapper[5002]: I1203 18:08:55.781653 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:08:55 crc kubenswrapper[5002]: I1203 18:08:55.883710 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5457b7bdcf-k79t9"] Dec 03 18:08:55 crc kubenswrapper[5002]: I1203 18:08:55.884015 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerName="dnsmasq-dns" containerID="cri-o://6b379d16cb549caea061692c36fe3607343b439ecd1ba33fbbeecbeb92017427" gracePeriod=10 Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.140481 5002 generic.go:334] "Generic (PLEG): container finished" podID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerID="6b379d16cb549caea061692c36fe3607343b439ecd1ba33fbbeecbeb92017427" exitCode=0 Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.140572 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" event={"ID":"acda6a07-aaed-4e3d-8b41-4bee23141f2f","Type":"ContainerDied","Data":"6b379d16cb549caea061692c36fe3607343b439ecd1ba33fbbeecbeb92017427"} Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.395288 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.518974 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-dns-svc\") pod \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.519109 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-config\") pod \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.519267 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4q2hg\" (UniqueName: \"kubernetes.io/projected/acda6a07-aaed-4e3d-8b41-4bee23141f2f-kube-api-access-4q2hg\") pod \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.519304 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-sb\") pod \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.520118 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-nb\") pod \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\" (UID: \"acda6a07-aaed-4e3d-8b41-4bee23141f2f\") " Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.524647 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acda6a07-aaed-4e3d-8b41-4bee23141f2f-kube-api-access-4q2hg" (OuterVolumeSpecName: "kube-api-access-4q2hg") pod "acda6a07-aaed-4e3d-8b41-4bee23141f2f" (UID: "acda6a07-aaed-4e3d-8b41-4bee23141f2f"). InnerVolumeSpecName "kube-api-access-4q2hg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.570486 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "acda6a07-aaed-4e3d-8b41-4bee23141f2f" (UID: "acda6a07-aaed-4e3d-8b41-4bee23141f2f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.570927 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-config" (OuterVolumeSpecName: "config") pod "acda6a07-aaed-4e3d-8b41-4bee23141f2f" (UID: "acda6a07-aaed-4e3d-8b41-4bee23141f2f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.577785 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "acda6a07-aaed-4e3d-8b41-4bee23141f2f" (UID: "acda6a07-aaed-4e3d-8b41-4bee23141f2f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.590489 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "acda6a07-aaed-4e3d-8b41-4bee23141f2f" (UID: "acda6a07-aaed-4e3d-8b41-4bee23141f2f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.622586 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.622987 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.623005 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.623018 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4q2hg\" (UniqueName: \"kubernetes.io/projected/acda6a07-aaed-4e3d-8b41-4bee23141f2f-kube-api-access-4q2hg\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:56 crc kubenswrapper[5002]: I1203 18:08:56.623032 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/acda6a07-aaed-4e3d-8b41-4bee23141f2f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:08:57 crc kubenswrapper[5002]: I1203 18:08:57.156966 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" event={"ID":"acda6a07-aaed-4e3d-8b41-4bee23141f2f","Type":"ContainerDied","Data":"9949060be3d99579636b45faedfb46191fd560851c226cc646bd5fc8809b7325"} Dec 03 18:08:57 crc kubenswrapper[5002]: I1203 18:08:57.157033 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5457b7bdcf-k79t9" Dec 03 18:08:57 crc kubenswrapper[5002]: I1203 18:08:57.157055 5002 scope.go:117] "RemoveContainer" containerID="6b379d16cb549caea061692c36fe3607343b439ecd1ba33fbbeecbeb92017427" Dec 03 18:08:57 crc kubenswrapper[5002]: I1203 18:08:57.187930 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5457b7bdcf-k79t9"] Dec 03 18:08:57 crc kubenswrapper[5002]: I1203 18:08:57.194159 5002 scope.go:117] "RemoveContainer" containerID="f91e5a2ed97d3e0fc55b33ac8413a866b2a5a2128c01d82e0884e70213b088d5" Dec 03 18:08:57 crc kubenswrapper[5002]: I1203 18:08:57.195272 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5457b7bdcf-k79t9"] Dec 03 18:08:58 crc kubenswrapper[5002]: I1203 18:08:58.853781 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" path="/var/lib/kubelet/pods/acda6a07-aaed-4e3d-8b41-4bee23141f2f/volumes" Dec 03 18:08:59 crc kubenswrapper[5002]: I1203 18:08:59.449338 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 18:08:59 crc kubenswrapper[5002]: I1203 18:08:59.449426 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 18:08:59 crc kubenswrapper[5002]: I1203 18:08:59.477505 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 18:08:59 crc kubenswrapper[5002]: I1203 18:08:59.488020 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 18:09:00 crc kubenswrapper[5002]: I1203 18:09:00.193988 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 18:09:00 crc kubenswrapper[5002]: I1203 18:09:00.194446 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 18:09:00 crc kubenswrapper[5002]: I1203 18:09:00.501532 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:00 crc kubenswrapper[5002]: I1203 18:09:00.501595 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:00 crc kubenswrapper[5002]: I1203 18:09:00.549595 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:00 crc kubenswrapper[5002]: I1203 18:09:00.571953 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:01 crc kubenswrapper[5002]: I1203 18:09:01.202276 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:01 crc kubenswrapper[5002]: I1203 18:09:01.202318 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:02 crc kubenswrapper[5002]: I1203 18:09:02.125763 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 18:09:02 crc kubenswrapper[5002]: I1203 18:09:02.128353 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-external-api-0" Dec 03 18:09:03 crc kubenswrapper[5002]: I1203 18:09:03.206106 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:03 crc kubenswrapper[5002]: I1203 18:09:03.220865 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 18:09:03 crc kubenswrapper[5002]: I1203 18:09:03.275316 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 18:09:05 crc kubenswrapper[5002]: E1203 18:09:05.578823 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.155:57294->38.102.83.155:42635: write tcp 38.102.83.155:57294->38.102.83.155:42635: write: broken pipe Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.019876 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tltrr"] Dec 03 18:09:13 crc kubenswrapper[5002]: E1203 18:09:13.020731 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerName="init" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.020762 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerName="init" Dec 03 18:09:13 crc kubenswrapper[5002]: E1203 18:09:13.020791 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerName="dnsmasq-dns" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.020798 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerName="dnsmasq-dns" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.021169 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="acda6a07-aaed-4e3d-8b41-4bee23141f2f" containerName="dnsmasq-dns" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.021960 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.029231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tltrr"] Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.113108 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-51bd-account-create-update-zdmpv"] Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.114741 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.116918 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.121483 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-51bd-account-create-update-zdmpv"] Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.165968 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9j6r\" (UniqueName: \"kubernetes.io/projected/592398aa-42a0-4f3c-90af-b1d797d70463-kube-api-access-b9j6r\") pod \"placement-db-create-tltrr\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.166009 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/592398aa-42a0-4f3c-90af-b1d797d70463-operator-scripts\") pod \"placement-db-create-tltrr\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.268389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wwjb\" (UniqueName: \"kubernetes.io/projected/3031e012-9670-43c9-8af7-7934214166fa-kube-api-access-2wwjb\") pod \"placement-51bd-account-create-update-zdmpv\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.268455 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3031e012-9670-43c9-8af7-7934214166fa-operator-scripts\") pod \"placement-51bd-account-create-update-zdmpv\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.268496 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9j6r\" (UniqueName: \"kubernetes.io/projected/592398aa-42a0-4f3c-90af-b1d797d70463-kube-api-access-b9j6r\") pod \"placement-db-create-tltrr\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.268526 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/592398aa-42a0-4f3c-90af-b1d797d70463-operator-scripts\") pod \"placement-db-create-tltrr\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.269441 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/592398aa-42a0-4f3c-90af-b1d797d70463-operator-scripts\") pod \"placement-db-create-tltrr\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.287185 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9j6r\" (UniqueName: 
\"kubernetes.io/projected/592398aa-42a0-4f3c-90af-b1d797d70463-kube-api-access-b9j6r\") pod \"placement-db-create-tltrr\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.345733 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tltrr" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.370088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wwjb\" (UniqueName: \"kubernetes.io/projected/3031e012-9670-43c9-8af7-7934214166fa-kube-api-access-2wwjb\") pod \"placement-51bd-account-create-update-zdmpv\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.370156 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3031e012-9670-43c9-8af7-7934214166fa-operator-scripts\") pod \"placement-51bd-account-create-update-zdmpv\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.371015 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3031e012-9670-43c9-8af7-7934214166fa-operator-scripts\") pod \"placement-51bd-account-create-update-zdmpv\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.391143 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wwjb\" (UniqueName: \"kubernetes.io/projected/3031e012-9670-43c9-8af7-7934214166fa-kube-api-access-2wwjb\") pod \"placement-51bd-account-create-update-zdmpv\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.432252 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.792231 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tltrr"] Dec 03 18:09:13 crc kubenswrapper[5002]: W1203 18:09:13.971219 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3031e012_9670_43c9_8af7_7934214166fa.slice/crio-1b755f11cab106e9b93b974fe45b30a44a4019bb0aa4f8eaeb81bd65ebd3c9d0 WatchSource:0}: Error finding container 1b755f11cab106e9b93b974fe45b30a44a4019bb0aa4f8eaeb81bd65ebd3c9d0: Status 404 returned error can't find the container with id 1b755f11cab106e9b93b974fe45b30a44a4019bb0aa4f8eaeb81bd65ebd3c9d0 Dec 03 18:09:13 crc kubenswrapper[5002]: I1203 18:09:13.973138 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-51bd-account-create-update-zdmpv"] Dec 03 18:09:14 crc kubenswrapper[5002]: I1203 18:09:14.329179 5002 generic.go:334] "Generic (PLEG): container finished" podID="592398aa-42a0-4f3c-90af-b1d797d70463" containerID="5eed70dbc21661b42ffba0dded5332b64d8d67e1bbb1ace10124a7d368c0a16d" exitCode=0 Dec 03 18:09:14 crc kubenswrapper[5002]: I1203 18:09:14.329274 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tltrr" event={"ID":"592398aa-42a0-4f3c-90af-b1d797d70463","Type":"ContainerDied","Data":"5eed70dbc21661b42ffba0dded5332b64d8d67e1bbb1ace10124a7d368c0a16d"} Dec 03 18:09:14 crc kubenswrapper[5002]: I1203 18:09:14.329350 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tltrr" event={"ID":"592398aa-42a0-4f3c-90af-b1d797d70463","Type":"ContainerStarted","Data":"0b0187f6e9f5174c525eafa6a6c30624af00191eaeed57d4a549f8383767f861"} Dec 03 18:09:14 crc kubenswrapper[5002]: I1203 18:09:14.331291 5002 generic.go:334] "Generic (PLEG): container finished" podID="3031e012-9670-43c9-8af7-7934214166fa" containerID="eb1bdfb237ef6a1f4425a65f2fd6e1176696c5b59741fefdeb54b6ba1ad90c9f" exitCode=0 Dec 03 18:09:14 crc kubenswrapper[5002]: I1203 18:09:14.331327 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-51bd-account-create-update-zdmpv" event={"ID":"3031e012-9670-43c9-8af7-7934214166fa","Type":"ContainerDied","Data":"eb1bdfb237ef6a1f4425a65f2fd6e1176696c5b59741fefdeb54b6ba1ad90c9f"} Dec 03 18:09:14 crc kubenswrapper[5002]: I1203 18:09:14.331634 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-51bd-account-create-update-zdmpv" event={"ID":"3031e012-9670-43c9-8af7-7934214166fa","Type":"ContainerStarted","Data":"1b755f11cab106e9b93b974fe45b30a44a4019bb0aa4f8eaeb81bd65ebd3c9d0"} Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.760610 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tltrr" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.767113 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.861829 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/592398aa-42a0-4f3c-90af-b1d797d70463-operator-scripts\") pod \"592398aa-42a0-4f3c-90af-b1d797d70463\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.862226 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9j6r\" (UniqueName: \"kubernetes.io/projected/592398aa-42a0-4f3c-90af-b1d797d70463-kube-api-access-b9j6r\") pod \"592398aa-42a0-4f3c-90af-b1d797d70463\" (UID: \"592398aa-42a0-4f3c-90af-b1d797d70463\") " Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.862530 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/592398aa-42a0-4f3c-90af-b1d797d70463-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "592398aa-42a0-4f3c-90af-b1d797d70463" (UID: "592398aa-42a0-4f3c-90af-b1d797d70463"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.863826 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/592398aa-42a0-4f3c-90af-b1d797d70463-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.871844 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/592398aa-42a0-4f3c-90af-b1d797d70463-kube-api-access-b9j6r" (OuterVolumeSpecName: "kube-api-access-b9j6r") pod "592398aa-42a0-4f3c-90af-b1d797d70463" (UID: "592398aa-42a0-4f3c-90af-b1d797d70463"). InnerVolumeSpecName "kube-api-access-b9j6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.968260 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3031e012-9670-43c9-8af7-7934214166fa-operator-scripts\") pod \"3031e012-9670-43c9-8af7-7934214166fa\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.968389 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wwjb\" (UniqueName: \"kubernetes.io/projected/3031e012-9670-43c9-8af7-7934214166fa-kube-api-access-2wwjb\") pod \"3031e012-9670-43c9-8af7-7934214166fa\" (UID: \"3031e012-9670-43c9-8af7-7934214166fa\") " Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.968838 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9j6r\" (UniqueName: \"kubernetes.io/projected/592398aa-42a0-4f3c-90af-b1d797d70463-kube-api-access-b9j6r\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.971055 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3031e012-9670-43c9-8af7-7934214166fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3031e012-9670-43c9-8af7-7934214166fa" (UID: "3031e012-9670-43c9-8af7-7934214166fa"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:09:15 crc kubenswrapper[5002]: I1203 18:09:15.974997 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3031e012-9670-43c9-8af7-7934214166fa-kube-api-access-2wwjb" (OuterVolumeSpecName: "kube-api-access-2wwjb") pod "3031e012-9670-43c9-8af7-7934214166fa" (UID: "3031e012-9670-43c9-8af7-7934214166fa"). InnerVolumeSpecName "kube-api-access-2wwjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.071158 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3031e012-9670-43c9-8af7-7934214166fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.071211 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wwjb\" (UniqueName: \"kubernetes.io/projected/3031e012-9670-43c9-8af7-7934214166fa-kube-api-access-2wwjb\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.354975 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-51bd-account-create-update-zdmpv" event={"ID":"3031e012-9670-43c9-8af7-7934214166fa","Type":"ContainerDied","Data":"1b755f11cab106e9b93b974fe45b30a44a4019bb0aa4f8eaeb81bd65ebd3c9d0"} Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.355010 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-51bd-account-create-update-zdmpv" Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.355028 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b755f11cab106e9b93b974fe45b30a44a4019bb0aa4f8eaeb81bd65ebd3c9d0" Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.357545 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tltrr" event={"ID":"592398aa-42a0-4f3c-90af-b1d797d70463","Type":"ContainerDied","Data":"0b0187f6e9f5174c525eafa6a6c30624af00191eaeed57d4a549f8383767f861"} Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.357571 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b0187f6e9f5174c525eafa6a6c30624af00191eaeed57d4a549f8383767f861" Dec 03 18:09:16 crc kubenswrapper[5002]: I1203 18:09:16.357620 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-tltrr" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.469907 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79966dc9f5-bw22t"] Dec 03 18:09:18 crc kubenswrapper[5002]: E1203 18:09:18.470613 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3031e012-9670-43c9-8af7-7934214166fa" containerName="mariadb-account-create-update" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.470628 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="3031e012-9670-43c9-8af7-7934214166fa" containerName="mariadb-account-create-update" Dec 03 18:09:18 crc kubenswrapper[5002]: E1203 18:09:18.470649 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592398aa-42a0-4f3c-90af-b1d797d70463" containerName="mariadb-database-create" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.470655 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="592398aa-42a0-4f3c-90af-b1d797d70463" containerName="mariadb-database-create" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.470840 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="3031e012-9670-43c9-8af7-7934214166fa" containerName="mariadb-account-create-update" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.470860 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="592398aa-42a0-4f3c-90af-b1d797d70463" containerName="mariadb-database-create" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.471819 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.492148 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79966dc9f5-bw22t"] Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.521223 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-4msfn"] Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.522573 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.530333 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.530733 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539093 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lmzgw" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539310 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdcdl\" (UniqueName: \"kubernetes.io/projected/af217565-3928-470d-a546-1a2706a76ad8-kube-api-access-hdcdl\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539374 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-sb\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539601 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-nb\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539669 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw2qw\" (UniqueName: \"kubernetes.io/projected/f8ce05c5-62fa-4073-b5c8-b196479099f3-kube-api-access-pw2qw\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539731 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-config\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539801 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-combined-ca-bundle\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539875 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-scripts\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.539962 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-dns-svc\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.540029 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ce05c5-62fa-4073-b5c8-b196479099f3-logs\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.540045 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-config-data\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.549808 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4msfn"] Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641483 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ce05c5-62fa-4073-b5c8-b196479099f3-logs\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641537 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-config-data\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641568 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdcdl\" (UniqueName: \"kubernetes.io/projected/af217565-3928-470d-a546-1a2706a76ad8-kube-api-access-hdcdl\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-sb\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641664 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-nb\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641695 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw2qw\" (UniqueName: \"kubernetes.io/projected/f8ce05c5-62fa-4073-b5c8-b196479099f3-kube-api-access-pw2qw\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641739 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-config\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641792 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-combined-ca-bundle\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641833 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-scripts\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.641886 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-dns-svc\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.642186 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ce05c5-62fa-4073-b5c8-b196479099f3-logs\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.643055 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-dns-svc\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.643416 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-sb\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.644242 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-config\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.644497 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-nb\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.650564 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-scripts\") pod \"placement-db-sync-4msfn\" (UID: 
\"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.650848 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-config-data\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.651083 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-combined-ca-bundle\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.664897 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdcdl\" (UniqueName: \"kubernetes.io/projected/af217565-3928-470d-a546-1a2706a76ad8-kube-api-access-hdcdl\") pod \"dnsmasq-dns-79966dc9f5-bw22t\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.665981 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw2qw\" (UniqueName: \"kubernetes.io/projected/f8ce05c5-62fa-4073-b5c8-b196479099f3-kube-api-access-pw2qw\") pod \"placement-db-sync-4msfn\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") " pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.794829 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:18 crc kubenswrapper[5002]: I1203 18:09:18.840581 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-4msfn" Dec 03 18:09:19 crc kubenswrapper[5002]: I1203 18:09:19.297483 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79966dc9f5-bw22t"] Dec 03 18:09:19 crc kubenswrapper[5002]: W1203 18:09:19.301400 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf217565_3928_470d_a546_1a2706a76ad8.slice/crio-1393831c82513f7667d930f4b4d8912ac855a6dd580984ab427284361f2c15de WatchSource:0}: Error finding container 1393831c82513f7667d930f4b4d8912ac855a6dd580984ab427284361f2c15de: Status 404 returned error can't find the container with id 1393831c82513f7667d930f4b4d8912ac855a6dd580984ab427284361f2c15de Dec 03 18:09:19 crc kubenswrapper[5002]: I1203 18:09:19.417210 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" event={"ID":"af217565-3928-470d-a546-1a2706a76ad8","Type":"ContainerStarted","Data":"1393831c82513f7667d930f4b4d8912ac855a6dd580984ab427284361f2c15de"} Dec 03 18:09:19 crc kubenswrapper[5002]: I1203 18:09:19.478544 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4msfn"] Dec 03 18:09:19 crc kubenswrapper[5002]: W1203 18:09:19.482998 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8ce05c5_62fa_4073_b5c8_b196479099f3.slice/crio-f567ca2a200a0b8feeef908b5c80f2a25373061c1be5f0fc6be6e731d9f05c5d WatchSource:0}: Error finding container f567ca2a200a0b8feeef908b5c80f2a25373061c1be5f0fc6be6e731d9f05c5d: Status 404 returned error can't find the container with id f567ca2a200a0b8feeef908b5c80f2a25373061c1be5f0fc6be6e731d9f05c5d Dec 03 18:09:20 crc kubenswrapper[5002]: I1203 18:09:20.426877 5002 generic.go:334] "Generic (PLEG): container finished" podID="af217565-3928-470d-a546-1a2706a76ad8" containerID="ea763ba905ec0796392ccda4de8153f547f8ea7ddf297d0960e5c69b692d2a64" exitCode=0 Dec 03 18:09:20 crc kubenswrapper[5002]: I1203 18:09:20.426953 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" event={"ID":"af217565-3928-470d-a546-1a2706a76ad8","Type":"ContainerDied","Data":"ea763ba905ec0796392ccda4de8153f547f8ea7ddf297d0960e5c69b692d2a64"} Dec 03 18:09:20 crc kubenswrapper[5002]: I1203 18:09:20.430195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4msfn" event={"ID":"f8ce05c5-62fa-4073-b5c8-b196479099f3","Type":"ContainerStarted","Data":"4d74ffe2f20bfcef2741b38b7c4b2507a6b3d1671f08811e15470837334cddb9"} Dec 03 18:09:20 crc kubenswrapper[5002]: I1203 18:09:20.430251 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4msfn" event={"ID":"f8ce05c5-62fa-4073-b5c8-b196479099f3","Type":"ContainerStarted","Data":"f567ca2a200a0b8feeef908b5c80f2a25373061c1be5f0fc6be6e731d9f05c5d"} Dec 03 18:09:20 crc kubenswrapper[5002]: I1203 18:09:20.479922 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-4msfn" podStartSLOduration=2.479902931 podStartE2EDuration="2.479902931s" podCreationTimestamp="2025-12-03 18:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:09:20.473984482 +0000 UTC m=+5883.887806420" watchObservedRunningTime="2025-12-03 18:09:20.479902931 +0000 UTC m=+5883.893724819" Dec 03 18:09:21 
Dec 03 18:09:21 crc kubenswrapper[5002]: I1203 18:09:21.440761 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4msfn" event={"ID":"f8ce05c5-62fa-4073-b5c8-b196479099f3","Type":"ContainerDied","Data":"4d74ffe2f20bfcef2741b38b7c4b2507a6b3d1671f08811e15470837334cddb9"}
Dec 03 18:09:21 crc kubenswrapper[5002]: I1203 18:09:21.444458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" event={"ID":"af217565-3928-470d-a546-1a2706a76ad8","Type":"ContainerStarted","Data":"60c2fb83848f3651194be7012820b2fdbdfbce4265a073c8d292082cb7df9bd6"}
Dec 03 18:09:21 crc kubenswrapper[5002]: I1203 18:09:21.444846 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t"
Dec 03 18:09:21 crc kubenswrapper[5002]: I1203 18:09:21.474116 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" podStartSLOduration=3.47410019 podStartE2EDuration="3.47410019s" podCreationTimestamp="2025-12-03 18:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:09:21.473914626 +0000 UTC m=+5884.887736524" watchObservedRunningTime="2025-12-03 18:09:21.47410019 +0000 UTC m=+5884.887922078"
Dec 03 18:09:22 crc kubenswrapper[5002]: I1203 18:09:22.868549 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4msfn"
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.026241 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ce05c5-62fa-4073-b5c8-b196479099f3-logs\") pod \"f8ce05c5-62fa-4073-b5c8-b196479099f3\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") "
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.026361 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-scripts\") pod \"f8ce05c5-62fa-4073-b5c8-b196479099f3\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") "
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.026390 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-combined-ca-bundle\") pod \"f8ce05c5-62fa-4073-b5c8-b196479099f3\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") "
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.026417 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw2qw\" (UniqueName: \"kubernetes.io/projected/f8ce05c5-62fa-4073-b5c8-b196479099f3-kube-api-access-pw2qw\") pod \"f8ce05c5-62fa-4073-b5c8-b196479099f3\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") "
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.026589 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-config-data\") pod \"f8ce05c5-62fa-4073-b5c8-b196479099f3\" (UID: \"f8ce05c5-62fa-4073-b5c8-b196479099f3\") "
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.026717 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8ce05c5-62fa-4073-b5c8-b196479099f3-logs" (OuterVolumeSpecName: "logs") pod "f8ce05c5-62fa-4073-b5c8-b196479099f3" (UID: "f8ce05c5-62fa-4073-b5c8-b196479099f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.027006 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ce05c5-62fa-4073-b5c8-b196479099f3-logs\") on node \"crc\" DevicePath \"\""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.032308 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ce05c5-62fa-4073-b5c8-b196479099f3-kube-api-access-pw2qw" (OuterVolumeSpecName: "kube-api-access-pw2qw") pod "f8ce05c5-62fa-4073-b5c8-b196479099f3" (UID: "f8ce05c5-62fa-4073-b5c8-b196479099f3"). InnerVolumeSpecName "kube-api-access-pw2qw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.034931 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-scripts" (OuterVolumeSpecName: "scripts") pod "f8ce05c5-62fa-4073-b5c8-b196479099f3" (UID: "f8ce05c5-62fa-4073-b5c8-b196479099f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.053982 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8ce05c5-62fa-4073-b5c8-b196479099f3" (UID: "f8ce05c5-62fa-4073-b5c8-b196479099f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.067904 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-config-data" (OuterVolumeSpecName: "config-data") pod "f8ce05c5-62fa-4073-b5c8-b196479099f3" (UID: "f8ce05c5-62fa-4073-b5c8-b196479099f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.129318 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.129359 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.129374 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ce05c5-62fa-4073-b5c8-b196479099f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.129388 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw2qw\" (UniqueName: \"kubernetes.io/projected/f8ce05c5-62fa-4073-b5c8-b196479099f3-kube-api-access-pw2qw\") on node \"crc\" DevicePath \"\""
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.467648 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4msfn" event={"ID":"f8ce05c5-62fa-4073-b5c8-b196479099f3","Type":"ContainerDied","Data":"f567ca2a200a0b8feeef908b5c80f2a25373061c1be5f0fc6be6e731d9f05c5d"}
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.467700 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f567ca2a200a0b8feeef908b5c80f2a25373061c1be5f0fc6be6e731d9f05c5d"
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.467724 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4msfn"
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.971012 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7b44f789c8-ntdkh"]
Dec 03 18:09:23 crc kubenswrapper[5002]: E1203 18:09:23.971730 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ce05c5-62fa-4073-b5c8-b196479099f3" containerName="placement-db-sync"
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.971764 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ce05c5-62fa-4073-b5c8-b196479099f3" containerName="placement-db-sync"
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.971989 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ce05c5-62fa-4073-b5c8-b196479099f3" containerName="placement-db-sync"
Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.973085 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7b44f789c8-ntdkh"
Need to start a new one" pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.978424 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.983946 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.983993 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lmzgw" Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.984154 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.984251 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 03 18:09:23 crc kubenswrapper[5002]: I1203 18:09:23.989299 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7b44f789c8-ntdkh"] Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.147970 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-internal-tls-certs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.148090 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03abc328-424e-4136-8820-ebe63d719343-logs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.148170 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-scripts\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.148222 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fr6p\" (UniqueName: \"kubernetes.io/projected/03abc328-424e-4136-8820-ebe63d719343-kube-api-access-9fr6p\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.148269 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-config-data\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.148488 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-combined-ca-bundle\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.148601 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-public-tls-certs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250123 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-combined-ca-bundle\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250198 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-public-tls-certs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250299 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-internal-tls-certs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250330 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03abc328-424e-4136-8820-ebe63d719343-logs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250356 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-scripts\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250378 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fr6p\" (UniqueName: \"kubernetes.io/projected/03abc328-424e-4136-8820-ebe63d719343-kube-api-access-9fr6p\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.250399 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-config-data\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.251141 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03abc328-424e-4136-8820-ebe63d719343-logs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.255478 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-public-tls-certs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.255879 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-internal-tls-certs\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.256100 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-combined-ca-bundle\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.258766 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-scripts\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.263535 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03abc328-424e-4136-8820-ebe63d719343-config-data\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.273599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fr6p\" (UniqueName: \"kubernetes.io/projected/03abc328-424e-4136-8820-ebe63d719343-kube-api-access-9fr6p\") pod \"placement-7b44f789c8-ntdkh\" (UID: \"03abc328-424e-4136-8820-ebe63d719343\") " pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.315639 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:24 crc kubenswrapper[5002]: I1203 18:09:24.758424 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7b44f789c8-ntdkh"] Dec 03 18:09:25 crc kubenswrapper[5002]: I1203 18:09:25.492665 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b44f789c8-ntdkh" event={"ID":"03abc328-424e-4136-8820-ebe63d719343","Type":"ContainerStarted","Data":"536ad32560bd353881faf1a77e4ecd259716fda1de48723f6398d003e92821d5"} Dec 03 18:09:25 crc kubenswrapper[5002]: I1203 18:09:25.493717 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:25 crc kubenswrapper[5002]: I1203 18:09:25.493830 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b44f789c8-ntdkh" event={"ID":"03abc328-424e-4136-8820-ebe63d719343","Type":"ContainerStarted","Data":"bfc703eb8907868a46e0d74de3d3db85debf3a66e8b068026f773c667c2886d5"} Dec 03 18:09:25 crc kubenswrapper[5002]: I1203 18:09:25.493889 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7b44f789c8-ntdkh" event={"ID":"03abc328-424e-4136-8820-ebe63d719343","Type":"ContainerStarted","Data":"7fe02d2e0ea9939f4c703a8df36113a2dd1e22a8a89a9d65ba0e752f1d5ecab9"} Dec 03 18:09:25 crc kubenswrapper[5002]: I1203 18:09:25.493959 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:28 crc kubenswrapper[5002]: I1203 18:09:28.795922 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:09:28 crc kubenswrapper[5002]: I1203 18:09:28.833670 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7b44f789c8-ntdkh" podStartSLOduration=5.833645617 podStartE2EDuration="5.833645617s" podCreationTimestamp="2025-12-03 18:09:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:09:25.51664244 +0000 UTC m=+5888.930464338" watchObservedRunningTime="2025-12-03 18:09:28.833645617 +0000 UTC m=+5892.247467515" Dec 03 18:09:28 crc kubenswrapper[5002]: I1203 18:09:28.880251 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fb5779dc-ltkm4"] Dec 03 18:09:28 crc kubenswrapper[5002]: I1203 18:09:28.880635 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" podUID="91b03059-5955-49e0-82d1-29051d784641" containerName="dnsmasq-dns" containerID="cri-o://f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5" gracePeriod=10 Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.348792 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.468555 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-nb\") pod \"91b03059-5955-49e0-82d1-29051d784641\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.468773 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-config\") pod \"91b03059-5955-49e0-82d1-29051d784641\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.468865 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65w6j\" (UniqueName: \"kubernetes.io/projected/91b03059-5955-49e0-82d1-29051d784641-kube-api-access-65w6j\") pod \"91b03059-5955-49e0-82d1-29051d784641\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.468911 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-dns-svc\") pod \"91b03059-5955-49e0-82d1-29051d784641\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.468961 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-sb\") pod \"91b03059-5955-49e0-82d1-29051d784641\" (UID: \"91b03059-5955-49e0-82d1-29051d784641\") " Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.485891 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91b03059-5955-49e0-82d1-29051d784641-kube-api-access-65w6j" (OuterVolumeSpecName: "kube-api-access-65w6j") pod "91b03059-5955-49e0-82d1-29051d784641" (UID: "91b03059-5955-49e0-82d1-29051d784641"). InnerVolumeSpecName "kube-api-access-65w6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.515203 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "91b03059-5955-49e0-82d1-29051d784641" (UID: "91b03059-5955-49e0-82d1-29051d784641"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.517954 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-config" (OuterVolumeSpecName: "config") pod "91b03059-5955-49e0-82d1-29051d784641" (UID: "91b03059-5955-49e0-82d1-29051d784641"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.519338 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "91b03059-5955-49e0-82d1-29051d784641" (UID: "91b03059-5955-49e0-82d1-29051d784641"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.529561 5002 generic.go:334] "Generic (PLEG): container finished" podID="91b03059-5955-49e0-82d1-29051d784641" containerID="f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5" exitCode=0 Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.529620 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" event={"ID":"91b03059-5955-49e0-82d1-29051d784641","Type":"ContainerDied","Data":"f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5"} Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.529655 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.529688 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb5779dc-ltkm4" event={"ID":"91b03059-5955-49e0-82d1-29051d784641","Type":"ContainerDied","Data":"f95793bf57657f1a8918f7c6572def489858b6e277abe6d3c3a63c9a8a88309e"} Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.529713 5002 scope.go:117] "RemoveContainer" containerID="f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.537277 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "91b03059-5955-49e0-82d1-29051d784641" (UID: "91b03059-5955-49e0-82d1-29051d784641"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.571948 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65w6j\" (UniqueName: \"kubernetes.io/projected/91b03059-5955-49e0-82d1-29051d784641-kube-api-access-65w6j\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.572567 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.572641 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.572709 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.572787 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91b03059-5955-49e0-82d1-29051d784641-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.582721 5002 scope.go:117] "RemoveContainer" containerID="af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.599792 5002 scope.go:117] "RemoveContainer" containerID="f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5" Dec 03 18:09:29 crc kubenswrapper[5002]: E1203 18:09:29.600306 5002 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5\": container with ID starting with f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5 not found: ID does not exist" containerID="f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.600354 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5"} err="failed to get container status \"f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5\": rpc error: code = NotFound desc = could not find container \"f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5\": container with ID starting with f509ea3b9efe0a3d5b05bd7b6a48998d1f67b5d7dea53907efc9322c4716d5f5 not found: ID does not exist" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.600393 5002 scope.go:117] "RemoveContainer" containerID="af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64" Dec 03 18:09:29 crc kubenswrapper[5002]: E1203 18:09:29.600698 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64\": container with ID starting with af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64 not found: ID does not exist" containerID="af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.600844 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64"} err="failed to get container status \"af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64\": rpc error: code = NotFound desc = could not find container \"af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64\": container with ID starting with af8e8145d0c2a98ab3e5233c75056d8322c422d813de26c4dd86f75ddb20cd64 not found: ID does not exist" Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.881945 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fb5779dc-ltkm4"] Dec 03 18:09:29 crc kubenswrapper[5002]: I1203 18:09:29.898898 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fb5779dc-ltkm4"] Dec 03 18:09:30 crc kubenswrapper[5002]: I1203 18:09:30.860822 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91b03059-5955-49e0-82d1-29051d784641" path="/var/lib/kubelet/pods/91b03059-5955-49e0-82d1-29051d784641/volumes" Dec 03 18:09:42 crc kubenswrapper[5002]: I1203 18:09:42.489421 5002 scope.go:117] "RemoveContainer" containerID="99288426a7baf36ea4c4e9a36e81263b1e31e298d326342f42d24281e45b9114" Dec 03 18:09:55 crc kubenswrapper[5002]: I1203 18:09:55.452244 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:09:55 crc kubenswrapper[5002]: I1203 18:09:55.505273 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7b44f789c8-ntdkh" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.590954 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-2mw49"] Dec 03 18:10:19 crc kubenswrapper[5002]: E1203 18:10:19.591701 5002 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="91b03059-5955-49e0-82d1-29051d784641" containerName="init" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.591714 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="91b03059-5955-49e0-82d1-29051d784641" containerName="init" Dec 03 18:10:19 crc kubenswrapper[5002]: E1203 18:10:19.591740 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91b03059-5955-49e0-82d1-29051d784641" containerName="dnsmasq-dns" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.591766 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="91b03059-5955-49e0-82d1-29051d784641" containerName="dnsmasq-dns" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.591946 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="91b03059-5955-49e0-82d1-29051d784641" containerName="dnsmasq-dns" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.592547 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.603647 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2mw49"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.686460 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-ldc6d"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.687864 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.702175 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ldc6d"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.704289 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13739dfc-aae8-4815-96e7-b59d41e09486-operator-scripts\") pod \"nova-api-db-create-2mw49\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") " pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.704437 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtjt5\" (UniqueName: \"kubernetes.io/projected/13739dfc-aae8-4815-96e7-b59d41e09486-kube-api-access-qtjt5\") pod \"nova-api-db-create-2mw49\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") " pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.797373 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-2e7f-account-create-update-hm9vx"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.798543 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.800462 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.805606 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtjt5\" (UniqueName: \"kubernetes.io/projected/13739dfc-aae8-4815-96e7-b59d41e09486-kube-api-access-qtjt5\") pod \"nova-api-db-create-2mw49\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") " pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.805668 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g22fs\" (UniqueName: \"kubernetes.io/projected/c2569fc9-80e8-4c61-8513-8422d954e2de-kube-api-access-g22fs\") pod \"nova-cell0-db-create-ldc6d\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") " pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.805699 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13739dfc-aae8-4815-96e7-b59d41e09486-operator-scripts\") pod \"nova-api-db-create-2mw49\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") " pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.805722 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2e7f-account-create-update-hm9vx"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.805789 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2569fc9-80e8-4c61-8513-8422d954e2de-operator-scripts\") pod \"nova-cell0-db-create-ldc6d\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") " pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.806923 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13739dfc-aae8-4815-96e7-b59d41e09486-operator-scripts\") pod \"nova-api-db-create-2mw49\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") " pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.826385 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtjt5\" (UniqueName: \"kubernetes.io/projected/13739dfc-aae8-4815-96e7-b59d41e09486-kube-api-access-qtjt5\") pod \"nova-api-db-create-2mw49\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") " pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.897462 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-xhbnc"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.898732 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.906993 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xhbnc"] Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.907523 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2569fc9-80e8-4c61-8513-8422d954e2de-operator-scripts\") pod \"nova-cell0-db-create-ldc6d\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") " pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.907570 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg6b5\" (UniqueName: \"kubernetes.io/projected/e1554d19-8306-4834-86f0-24395c08de62-kube-api-access-qg6b5\") pod \"nova-api-2e7f-account-create-update-hm9vx\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") " pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.907678 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g22fs\" (UniqueName: \"kubernetes.io/projected/c2569fc9-80e8-4c61-8513-8422d954e2de-kube-api-access-g22fs\") pod \"nova-cell0-db-create-ldc6d\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") " pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.907725 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1554d19-8306-4834-86f0-24395c08de62-operator-scripts\") pod \"nova-api-2e7f-account-create-update-hm9vx\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") " pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.908844 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2569fc9-80e8-4c61-8513-8422d954e2de-operator-scripts\") pod \"nova-cell0-db-create-ldc6d\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") " pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.913600 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2mw49" Dec 03 18:10:19 crc kubenswrapper[5002]: I1203 18:10:19.926061 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g22fs\" (UniqueName: \"kubernetes.io/projected/c2569fc9-80e8-4c61-8513-8422d954e2de-kube-api-access-g22fs\") pod \"nova-cell0-db-create-ldc6d\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") " pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.005636 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-1875-account-create-update-fx5zd"] Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.007095 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.008707 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.009134 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.009966 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1554d19-8306-4834-86f0-24395c08de62-operator-scripts\") pod \"nova-api-2e7f-account-create-update-hm9vx\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") " pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.010047 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp9xw\" (UniqueName: \"kubernetes.io/projected/055e8fd2-1d52-401b-acdd-c55bc568e4fa-kube-api-access-gp9xw\") pod \"nova-cell1-db-create-xhbnc\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") " pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.010098 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg6b5\" (UniqueName: \"kubernetes.io/projected/e1554d19-8306-4834-86f0-24395c08de62-kube-api-access-qg6b5\") pod \"nova-api-2e7f-account-create-update-hm9vx\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") " pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.010141 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/055e8fd2-1d52-401b-acdd-c55bc568e4fa-operator-scripts\") pod \"nova-cell1-db-create-xhbnc\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") " pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.010928 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1554d19-8306-4834-86f0-24395c08de62-operator-scripts\") pod \"nova-api-2e7f-account-create-update-hm9vx\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") " pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.019716 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1875-account-create-update-fx5zd"] Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.029385 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg6b5\" (UniqueName: \"kubernetes.io/projected/e1554d19-8306-4834-86f0-24395c08de62-kube-api-access-qg6b5\") pod \"nova-api-2e7f-account-create-update-hm9vx\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") " pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.111788 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp9xw\" (UniqueName: \"kubernetes.io/projected/055e8fd2-1d52-401b-acdd-c55bc568e4fa-kube-api-access-gp9xw\") pod \"nova-cell1-db-create-xhbnc\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") " pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.111856 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/055e8fd2-1d52-401b-acdd-c55bc568e4fa-operator-scripts\") pod \"nova-cell1-db-create-xhbnc\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") " pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 
18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.111921 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbsmp\" (UniqueName: \"kubernetes.io/projected/2a71c70e-3a49-4f5f-a891-145eea3e70b1-kube-api-access-jbsmp\") pod \"nova-cell0-1875-account-create-update-fx5zd\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.111981 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71c70e-3a49-4f5f-a891-145eea3e70b1-operator-scripts\") pod \"nova-cell0-1875-account-create-update-fx5zd\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.112729 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/055e8fd2-1d52-401b-acdd-c55bc568e4fa-operator-scripts\") pod \"nova-cell1-db-create-xhbnc\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") " pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.120101 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.132777 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp9xw\" (UniqueName: \"kubernetes.io/projected/055e8fd2-1d52-401b-acdd-c55bc568e4fa-kube-api-access-gp9xw\") pod \"nova-cell1-db-create-xhbnc\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") " pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.218302 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbsmp\" (UniqueName: \"kubernetes.io/projected/2a71c70e-3a49-4f5f-a891-145eea3e70b1-kube-api-access-jbsmp\") pod \"nova-cell0-1875-account-create-update-fx5zd\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.218471 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71c70e-3a49-4f5f-a891-145eea3e70b1-operator-scripts\") pod \"nova-cell0-1875-account-create-update-fx5zd\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.225380 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71c70e-3a49-4f5f-a891-145eea3e70b1-operator-scripts\") pod \"nova-cell0-1875-account-create-update-fx5zd\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.243219 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.244282 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9ba0-account-create-update-jxxpd"] Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.245922 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.255181 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.255833 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbsmp\" (UniqueName: \"kubernetes.io/projected/2a71c70e-3a49-4f5f-a891-145eea3e70b1-kube-api-access-jbsmp\") pod \"nova-cell0-1875-account-create-update-fx5zd\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.262928 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9ba0-account-create-update-jxxpd"] Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.346152 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24t69\" (UniqueName: \"kubernetes.io/projected/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-kube-api-access-24t69\") pod \"nova-cell1-9ba0-account-create-update-jxxpd\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") " pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.346418 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-operator-scripts\") pod \"nova-cell1-9ba0-account-create-update-jxxpd\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") " pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.447959 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24t69\" (UniqueName: \"kubernetes.io/projected/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-kube-api-access-24t69\") pod \"nova-cell1-9ba0-account-create-update-jxxpd\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") " pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.448004 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-operator-scripts\") pod \"nova-cell1-9ba0-account-create-update-jxxpd\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") " pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.448791 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-operator-scripts\") pod \"nova-cell1-9ba0-account-create-update-jxxpd\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") " pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.473332 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24t69\" (UniqueName: 
\"kubernetes.io/projected/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-kube-api-access-24t69\") pod \"nova-cell1-9ba0-account-create-update-jxxpd\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") " pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.479228 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.546421 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2mw49"] Dec 03 18:10:20 crc kubenswrapper[5002]: W1203 18:10:20.581211 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13739dfc_aae8_4815_96e7_b59d41e09486.slice/crio-3a8f0e2401667ea5a5f234f8bb98b7add73c86266e6263c8fd2628524655bb7d WatchSource:0}: Error finding container 3a8f0e2401667ea5a5f234f8bb98b7add73c86266e6263c8fd2628524655bb7d: Status 404 returned error can't find the container with id 3a8f0e2401667ea5a5f234f8bb98b7add73c86266e6263c8fd2628524655bb7d Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.641117 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.761345 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ldc6d"] Dec 03 18:10:20 crc kubenswrapper[5002]: W1203 18:10:20.785326 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2569fc9_80e8_4c61_8513_8422d954e2de.slice/crio-3b14649f0081abbfc5d527dc85835bf7e16e5b689459f44f2c6172ae9a13b19c WatchSource:0}: Error finding container 3b14649f0081abbfc5d527dc85835bf7e16e5b689459f44f2c6172ae9a13b19c: Status 404 returned error can't find the container with id 3b14649f0081abbfc5d527dc85835bf7e16e5b689459f44f2c6172ae9a13b19c Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.786500 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2e7f-account-create-update-hm9vx"] Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.918229 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.918288 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:10:20 crc kubenswrapper[5002]: I1203 18:10:20.943358 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xhbnc"] Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.013623 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2mw49" event={"ID":"13739dfc-aae8-4815-96e7-b59d41e09486","Type":"ContainerStarted","Data":"00337c103c82e951e95fe478d5390ae774dd9ccba7bc4fc35e48fc3f4ae57596"} Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.013678 5002 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-api-db-create-2mw49" event={"ID":"13739dfc-aae8-4815-96e7-b59d41e09486","Type":"ContainerStarted","Data":"3a8f0e2401667ea5a5f234f8bb98b7add73c86266e6263c8fd2628524655bb7d"} Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.017387 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2e7f-account-create-update-hm9vx" event={"ID":"e1554d19-8306-4834-86f0-24395c08de62","Type":"ContainerStarted","Data":"4d208b5b92513faa4d48c8b0e2354d8b86a9432a2f8cb34776dcf4ed27b0c357"} Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.028229 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ldc6d" event={"ID":"c2569fc9-80e8-4c61-8513-8422d954e2de","Type":"ContainerStarted","Data":"3b14649f0081abbfc5d527dc85835bf7e16e5b689459f44f2c6172ae9a13b19c"} Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.033918 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1875-account-create-update-fx5zd"] Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.036565 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xhbnc" event={"ID":"055e8fd2-1d52-401b-acdd-c55bc568e4fa","Type":"ContainerStarted","Data":"09c2698296f3ca3890ef3359e7b7f683776d941baff468e93659cce753f3b078"} Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.039249 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-2mw49" podStartSLOduration=2.039210348 podStartE2EDuration="2.039210348s" podCreationTimestamp="2025-12-03 18:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:21.032337373 +0000 UTC m=+5944.446159261" watchObservedRunningTime="2025-12-03 18:10:21.039210348 +0000 UTC m=+5944.453032236" Dec 03 18:10:21 crc kubenswrapper[5002]: W1203 18:10:21.050114 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a71c70e_3a49_4f5f_a891_145eea3e70b1.slice/crio-df9b3fc18e43dd327c608f6877ad859e0d9cc5c3988e06a73b7f2c79bb0dab0f WatchSource:0}: Error finding container df9b3fc18e43dd327c608f6877ad859e0d9cc5c3988e06a73b7f2c79bb0dab0f: Status 404 returned error can't find the container with id df9b3fc18e43dd327c608f6877ad859e0d9cc5c3988e06a73b7f2c79bb0dab0f Dec 03 18:10:21 crc kubenswrapper[5002]: I1203 18:10:21.159572 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9ba0-account-create-update-jxxpd"] Dec 03 18:10:21 crc kubenswrapper[5002]: W1203 18:10:21.206793 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4448265e_9399_4ef2_9e41_bdba3fd2fe6a.slice/crio-aaa7ab07d637c43540464dd36d96a26ff69c1145f2edbb578d416c9b550afc90 WatchSource:0}: Error finding container aaa7ab07d637c43540464dd36d96a26ff69c1145f2edbb578d416c9b550afc90: Status 404 returned error can't find the container with id aaa7ab07d637c43540464dd36d96a26ff69c1145f2edbb578d416c9b550afc90 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.045894 5002 generic.go:334] "Generic (PLEG): container finished" podID="4448265e-9399-4ef2-9e41-bdba3fd2fe6a" containerID="06bfef7697c7234b0d9ce7f48dc3f1ce9fc6347a71a541a8d12b48ff25adc6f4" exitCode=0 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.045994 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" event={"ID":"4448265e-9399-4ef2-9e41-bdba3fd2fe6a","Type":"ContainerDied","Data":"06bfef7697c7234b0d9ce7f48dc3f1ce9fc6347a71a541a8d12b48ff25adc6f4"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.046233 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" event={"ID":"4448265e-9399-4ef2-9e41-bdba3fd2fe6a","Type":"ContainerStarted","Data":"aaa7ab07d637c43540464dd36d96a26ff69c1145f2edbb578d416c9b550afc90"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.049152 5002 generic.go:334] "Generic (PLEG): container finished" podID="e1554d19-8306-4834-86f0-24395c08de62" containerID="f2de2ff71ab389e2fadb93c19fc3aab7ea754fdd5e26d68c73eb27de17efaa64" exitCode=0 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.049308 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2e7f-account-create-update-hm9vx" event={"ID":"e1554d19-8306-4834-86f0-24395c08de62","Type":"ContainerDied","Data":"f2de2ff71ab389e2fadb93c19fc3aab7ea754fdd5e26d68c73eb27de17efaa64"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.050761 5002 generic.go:334] "Generic (PLEG): container finished" podID="c2569fc9-80e8-4c61-8513-8422d954e2de" containerID="f57dad9a8086d5be1fb1dd7f7800833c8c0ffacb449ab818e5a354d1c70b8e7c" exitCode=0 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.050819 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ldc6d" event={"ID":"c2569fc9-80e8-4c61-8513-8422d954e2de","Type":"ContainerDied","Data":"f57dad9a8086d5be1fb1dd7f7800833c8c0ffacb449ab818e5a354d1c70b8e7c"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.052464 5002 generic.go:334] "Generic (PLEG): container finished" podID="055e8fd2-1d52-401b-acdd-c55bc568e4fa" containerID="a09fb65f91f4c09e8e31acf07cdb0e1abc946a43d4e0333ff62b7fafafebfac1" exitCode=0 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.052511 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xhbnc" event={"ID":"055e8fd2-1d52-401b-acdd-c55bc568e4fa","Type":"ContainerDied","Data":"a09fb65f91f4c09e8e31acf07cdb0e1abc946a43d4e0333ff62b7fafafebfac1"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.053984 5002 generic.go:334] "Generic (PLEG): container finished" podID="2a71c70e-3a49-4f5f-a891-145eea3e70b1" containerID="b79d3c183a49f39dbfad85ac5c6b92ea38d7cdd82b8eea1c33ad8da79b1d7e67" exitCode=0 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.054030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1875-account-create-update-fx5zd" event={"ID":"2a71c70e-3a49-4f5f-a891-145eea3e70b1","Type":"ContainerDied","Data":"b79d3c183a49f39dbfad85ac5c6b92ea38d7cdd82b8eea1c33ad8da79b1d7e67"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.054049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1875-account-create-update-fx5zd" event={"ID":"2a71c70e-3a49-4f5f-a891-145eea3e70b1","Type":"ContainerStarted","Data":"df9b3fc18e43dd327c608f6877ad859e0d9cc5c3988e06a73b7f2c79bb0dab0f"} Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.055253 5002 generic.go:334] "Generic (PLEG): container finished" podID="13739dfc-aae8-4815-96e7-b59d41e09486" containerID="00337c103c82e951e95fe478d5390ae774dd9ccba7bc4fc35e48fc3f4ae57596" exitCode=0 Dec 03 18:10:22 crc kubenswrapper[5002]: I1203 18:10:22.055283 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-db-create-2mw49" event={"ID":"13739dfc-aae8-4815-96e7-b59d41e09486","Type":"ContainerDied","Data":"00337c103c82e951e95fe478d5390ae774dd9ccba7bc4fc35e48fc3f4ae57596"} Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.463422 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1875-account-create-update-fx5zd" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.508361 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbsmp\" (UniqueName: \"kubernetes.io/projected/2a71c70e-3a49-4f5f-a891-145eea3e70b1-kube-api-access-jbsmp\") pod \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.508564 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71c70e-3a49-4f5f-a891-145eea3e70b1-operator-scripts\") pod \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\" (UID: \"2a71c70e-3a49-4f5f-a891-145eea3e70b1\") " Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.509420 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a71c70e-3a49-4f5f-a891-145eea3e70b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a71c70e-3a49-4f5f-a891-145eea3e70b1" (UID: "2a71c70e-3a49-4f5f-a891-145eea3e70b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.517730 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a71c70e-3a49-4f5f-a891-145eea3e70b1-kube-api-access-jbsmp" (OuterVolumeSpecName: "kube-api-access-jbsmp") pod "2a71c70e-3a49-4f5f-a891-145eea3e70b1" (UID: "2a71c70e-3a49-4f5f-a891-145eea3e70b1"). InnerVolumeSpecName "kube-api-access-jbsmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.612278 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71c70e-3a49-4f5f-a891-145eea3e70b1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.612332 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbsmp\" (UniqueName: \"kubernetes.io/projected/2a71c70e-3a49-4f5f-a891-145eea3e70b1-kube-api-access-jbsmp\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.652676 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.664794 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ldc6d" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.670447 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xhbnc" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.691919 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2e7f-account-create-update-hm9vx" Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.708579 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2mw49"
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713026 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg6b5\" (UniqueName: \"kubernetes.io/projected/e1554d19-8306-4834-86f0-24395c08de62-kube-api-access-qg6b5\") pod \"e1554d19-8306-4834-86f0-24395c08de62\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713105 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2569fc9-80e8-4c61-8513-8422d954e2de-operator-scripts\") pod \"c2569fc9-80e8-4c61-8513-8422d954e2de\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713212 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24t69\" (UniqueName: \"kubernetes.io/projected/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-kube-api-access-24t69\") pod \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713264 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g22fs\" (UniqueName: \"kubernetes.io/projected/c2569fc9-80e8-4c61-8513-8422d954e2de-kube-api-access-g22fs\") pod \"c2569fc9-80e8-4c61-8513-8422d954e2de\" (UID: \"c2569fc9-80e8-4c61-8513-8422d954e2de\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713310 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1554d19-8306-4834-86f0-24395c08de62-operator-scripts\") pod \"e1554d19-8306-4834-86f0-24395c08de62\" (UID: \"e1554d19-8306-4834-86f0-24395c08de62\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713379 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp9xw\" (UniqueName: \"kubernetes.io/projected/055e8fd2-1d52-401b-acdd-c55bc568e4fa-kube-api-access-gp9xw\") pod \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713413 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-operator-scripts\") pod \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\" (UID: \"4448265e-9399-4ef2-9e41-bdba3fd2fe6a\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713482 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/055e8fd2-1d52-401b-acdd-c55bc568e4fa-operator-scripts\") pod \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\" (UID: \"055e8fd2-1d52-401b-acdd-c55bc568e4fa\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.713873 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2569fc9-80e8-4c61-8513-8422d954e2de-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c2569fc9-80e8-4c61-8513-8422d954e2de" (UID: "c2569fc9-80e8-4c61-8513-8422d954e2de"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.714428 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1554d19-8306-4834-86f0-24395c08de62-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e1554d19-8306-4834-86f0-24395c08de62" (UID: "e1554d19-8306-4834-86f0-24395c08de62"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.714566 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4448265e-9399-4ef2-9e41-bdba3fd2fe6a" (UID: "4448265e-9399-4ef2-9e41-bdba3fd2fe6a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.714916 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/055e8fd2-1d52-401b-acdd-c55bc568e4fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "055e8fd2-1d52-401b-acdd-c55bc568e4fa" (UID: "055e8fd2-1d52-401b-acdd-c55bc568e4fa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.716509 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1554d19-8306-4834-86f0-24395c08de62-kube-api-access-qg6b5" (OuterVolumeSpecName: "kube-api-access-qg6b5") pod "e1554d19-8306-4834-86f0-24395c08de62" (UID: "e1554d19-8306-4834-86f0-24395c08de62"). InnerVolumeSpecName "kube-api-access-qg6b5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.720921 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-kube-api-access-24t69" (OuterVolumeSpecName: "kube-api-access-24t69") pod "4448265e-9399-4ef2-9e41-bdba3fd2fe6a" (UID: "4448265e-9399-4ef2-9e41-bdba3fd2fe6a"). InnerVolumeSpecName "kube-api-access-24t69". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.730477 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/055e8fd2-1d52-401b-acdd-c55bc568e4fa-kube-api-access-gp9xw" (OuterVolumeSpecName: "kube-api-access-gp9xw") pod "055e8fd2-1d52-401b-acdd-c55bc568e4fa" (UID: "055e8fd2-1d52-401b-acdd-c55bc568e4fa"). InnerVolumeSpecName "kube-api-access-gp9xw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.737702 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2569fc9-80e8-4c61-8513-8422d954e2de-kube-api-access-g22fs" (OuterVolumeSpecName: "kube-api-access-g22fs") pod "c2569fc9-80e8-4c61-8513-8422d954e2de" (UID: "c2569fc9-80e8-4c61-8513-8422d954e2de"). InnerVolumeSpecName "kube-api-access-g22fs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.815475 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13739dfc-aae8-4815-96e7-b59d41e09486-operator-scripts\") pod \"13739dfc-aae8-4815-96e7-b59d41e09486\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.815920 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtjt5\" (UniqueName: \"kubernetes.io/projected/13739dfc-aae8-4815-96e7-b59d41e09486-kube-api-access-qtjt5\") pod \"13739dfc-aae8-4815-96e7-b59d41e09486\" (UID: \"13739dfc-aae8-4815-96e7-b59d41e09486\") "
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.816598 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13739dfc-aae8-4815-96e7-b59d41e09486-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13739dfc-aae8-4815-96e7-b59d41e09486" (UID: "13739dfc-aae8-4815-96e7-b59d41e09486"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.816955 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13739dfc-aae8-4815-96e7-b59d41e09486-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.816979 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg6b5\" (UniqueName: \"kubernetes.io/projected/e1554d19-8306-4834-86f0-24395c08de62-kube-api-access-qg6b5\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.816993 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2569fc9-80e8-4c61-8513-8422d954e2de-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.817003 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24t69\" (UniqueName: \"kubernetes.io/projected/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-kube-api-access-24t69\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.817012 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g22fs\" (UniqueName: \"kubernetes.io/projected/c2569fc9-80e8-4c61-8513-8422d954e2de-kube-api-access-g22fs\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.817021 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1554d19-8306-4834-86f0-24395c08de62-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.817031 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp9xw\" (UniqueName: \"kubernetes.io/projected/055e8fd2-1d52-401b-acdd-c55bc568e4fa-kube-api-access-gp9xw\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.817042 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4448265e-9399-4ef2-9e41-bdba3fd2fe6a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.817051 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/055e8fd2-1d52-401b-acdd-c55bc568e4fa-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.824486 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13739dfc-aae8-4815-96e7-b59d41e09486-kube-api-access-qtjt5" (OuterVolumeSpecName: "kube-api-access-qtjt5") pod "13739dfc-aae8-4815-96e7-b59d41e09486" (UID: "13739dfc-aae8-4815-96e7-b59d41e09486"). InnerVolumeSpecName "kube-api-access-qtjt5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:10:23 crc kubenswrapper[5002]: I1203 18:10:23.919045 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtjt5\" (UniqueName: \"kubernetes.io/projected/13739dfc-aae8-4815-96e7-b59d41e09486-kube-api-access-qtjt5\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.076935 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd" event={"ID":"4448265e-9399-4ef2-9e41-bdba3fd2fe6a","Type":"ContainerDied","Data":"aaa7ab07d637c43540464dd36d96a26ff69c1145f2edbb578d416c9b550afc90"}
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.077161 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaa7ab07d637c43540464dd36d96a26ff69c1145f2edbb578d416c9b550afc90"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.076959 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ba0-account-create-update-jxxpd"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.082196 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2e7f-account-create-update-hm9vx" event={"ID":"e1554d19-8306-4834-86f0-24395c08de62","Type":"ContainerDied","Data":"4d208b5b92513faa4d48c8b0e2354d8b86a9432a2f8cb34776dcf4ed27b0c357"}
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.082237 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d208b5b92513faa4d48c8b0e2354d8b86a9432a2f8cb34776dcf4ed27b0c357"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.082845 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2e7f-account-create-update-hm9vx"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.084174 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ldc6d"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.084188 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ldc6d" event={"ID":"c2569fc9-80e8-4c61-8513-8422d954e2de","Type":"ContainerDied","Data":"3b14649f0081abbfc5d527dc85835bf7e16e5b689459f44f2c6172ae9a13b19c"}
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.084354 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b14649f0081abbfc5d527dc85835bf7e16e5b689459f44f2c6172ae9a13b19c"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.086902 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xhbnc" event={"ID":"055e8fd2-1d52-401b-acdd-c55bc568e4fa","Type":"ContainerDied","Data":"09c2698296f3ca3890ef3359e7b7f683776d941baff468e93659cce753f3b078"}
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.086929 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09c2698296f3ca3890ef3359e7b7f683776d941baff468e93659cce753f3b078"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.087237 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xhbnc"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.088928 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1875-account-create-update-fx5zd"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.088950 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1875-account-create-update-fx5zd" event={"ID":"2a71c70e-3a49-4f5f-a891-145eea3e70b1","Type":"ContainerDied","Data":"df9b3fc18e43dd327c608f6877ad859e0d9cc5c3988e06a73b7f2c79bb0dab0f"}
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.089011 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df9b3fc18e43dd327c608f6877ad859e0d9cc5c3988e06a73b7f2c79bb0dab0f"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.090615 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2mw49" event={"ID":"13739dfc-aae8-4815-96e7-b59d41e09486","Type":"ContainerDied","Data":"3a8f0e2401667ea5a5f234f8bb98b7add73c86266e6263c8fd2628524655bb7d"}
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.090650 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a8f0e2401667ea5a5f234f8bb98b7add73c86266e6263c8fd2628524655bb7d"
Dec 03 18:10:24 crc kubenswrapper[5002]: I1203 18:10:24.090737 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2mw49"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.196928 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5f9c"]
Dec 03 18:10:25 crc kubenswrapper[5002]: E1203 18:10:25.197658 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055e8fd2-1d52-401b-acdd-c55bc568e4fa" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.197675 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="055e8fd2-1d52-401b-acdd-c55bc568e4fa" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: E1203 18:10:25.197699 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2569fc9-80e8-4c61-8513-8422d954e2de" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.197708 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2569fc9-80e8-4c61-8513-8422d954e2de" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: E1203 18:10:25.197728 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13739dfc-aae8-4815-96e7-b59d41e09486" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.197738 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="13739dfc-aae8-4815-96e7-b59d41e09486" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: E1203 18:10:25.197784 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a71c70e-3a49-4f5f-a891-145eea3e70b1" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.197793 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a71c70e-3a49-4f5f-a891-145eea3e70b1" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: E1203 18:10:25.197810 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1554d19-8306-4834-86f0-24395c08de62" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.197818 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1554d19-8306-4834-86f0-24395c08de62" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: E1203 18:10:25.197832 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4448265e-9399-4ef2-9e41-bdba3fd2fe6a" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.197841 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="4448265e-9399-4ef2-9e41-bdba3fd2fe6a" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198047 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="055e8fd2-1d52-401b-acdd-c55bc568e4fa" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198063 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="13739dfc-aae8-4815-96e7-b59d41e09486" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198082 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1554d19-8306-4834-86f0-24395c08de62" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198096 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2569fc9-80e8-4c61-8513-8422d954e2de" containerName="mariadb-database-create"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198111 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a71c70e-3a49-4f5f-a891-145eea3e70b1" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198125 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="4448265e-9399-4ef2-9e41-bdba3fd2fe6a" containerName="mariadb-account-create-update"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.198814 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.202474 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.202806 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.203322 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tr99b"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.221056 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5f9c"]
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.240215 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwvds\" (UniqueName: \"kubernetes.io/projected/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-kube-api-access-zwvds\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.240271 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-scripts\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.240359 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.240413 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-config-data\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.342278 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwvds\" (UniqueName: \"kubernetes.io/projected/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-kube-api-access-zwvds\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.342349 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-scripts\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.342400 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.342442 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-config-data\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.346945 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-scripts\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.347385 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-config-data\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.351691 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.364199 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwvds\" (UniqueName: \"kubernetes.io/projected/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-kube-api-access-zwvds\") pod \"nova-cell0-conductor-db-sync-l5f9c\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") " pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.542567 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:25 crc kubenswrapper[5002]: I1203 18:10:25.987948 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5f9c"]
Dec 03 18:10:25 crc kubenswrapper[5002]: W1203 18:10:25.991433 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2fdc5c0_aa4d_4bb3_a9c7_8ed9d5808952.slice/crio-f146028c9a323c94b63f01810ab8def60f0bb7ac91d78c7195ceeebc21b52f5a WatchSource:0}: Error finding container f146028c9a323c94b63f01810ab8def60f0bb7ac91d78c7195ceeebc21b52f5a: Status 404 returned error can't find the container with id f146028c9a323c94b63f01810ab8def60f0bb7ac91d78c7195ceeebc21b52f5a
Dec 03 18:10:26 crc kubenswrapper[5002]: I1203 18:10:26.124477 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5f9c" event={"ID":"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952","Type":"ContainerStarted","Data":"f146028c9a323c94b63f01810ab8def60f0bb7ac91d78c7195ceeebc21b52f5a"}
Dec 03 18:10:27 crc kubenswrapper[5002]: I1203 18:10:27.137513 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5f9c" event={"ID":"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952","Type":"ContainerStarted","Data":"a733986b48e0f939b974aa88e70f14245f3a7035998271f6ca1951145d6c8311"}
Dec 03 18:10:27 crc kubenswrapper[5002]: I1203 18:10:27.166948 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-l5f9c" podStartSLOduration=2.166921669 podStartE2EDuration="2.166921669s" podCreationTimestamp="2025-12-03 18:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:27.155272146 +0000 UTC m=+5950.569094064" watchObservedRunningTime="2025-12-03 18:10:27.166921669 +0000 UTC m=+5950.580743577"
Dec 03 18:10:32 crc kubenswrapper[5002]: I1203 18:10:32.186998 5002 generic.go:334] "Generic (PLEG): container finished" podID="d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" containerID="a733986b48e0f939b974aa88e70f14245f3a7035998271f6ca1951145d6c8311" exitCode=0
Dec 03 18:10:32 crc kubenswrapper[5002]: I1203 18:10:32.187089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5f9c" event={"ID":"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952","Type":"ContainerDied","Data":"a733986b48e0f939b974aa88e70f14245f3a7035998271f6ca1951145d6c8311"}
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.673568 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.826441 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-config-data\") pod \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") "
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.826501 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-scripts\") pod \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") "
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.826576 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwvds\" (UniqueName: \"kubernetes.io/projected/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-kube-api-access-zwvds\") pod \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") "
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.826637 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-combined-ca-bundle\") pod \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\" (UID: \"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952\") "
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.832385 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-kube-api-access-zwvds" (OuterVolumeSpecName: "kube-api-access-zwvds") pod "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" (UID: "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952"). InnerVolumeSpecName "kube-api-access-zwvds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.833164 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-scripts" (OuterVolumeSpecName: "scripts") pod "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" (UID: "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.853479 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-config-data" (OuterVolumeSpecName: "config-data") pod "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" (UID: "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.860691 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" (UID: "d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.930165 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.930208 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.930223 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwvds\" (UniqueName: \"kubernetes.io/projected/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-kube-api-access-zwvds\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:33 crc kubenswrapper[5002]: I1203 18:10:33.930239 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.212018 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5f9c" event={"ID":"d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952","Type":"ContainerDied","Data":"f146028c9a323c94b63f01810ab8def60f0bb7ac91d78c7195ceeebc21b52f5a"}
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.212069 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f146028c9a323c94b63f01810ab8def60f0bb7ac91d78c7195ceeebc21b52f5a"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.212068 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5f9c"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.302186 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 03 18:10:34 crc kubenswrapper[5002]: E1203 18:10:34.302593 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" containerName="nova-cell0-conductor-db-sync"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.302612 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" containerName="nova-cell0-conductor-db-sync"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.302802 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" containerName="nova-cell0-conductor-db-sync"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.303391 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.305497 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tr99b"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.306036 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.315714 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.437958 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db10677-c084-4264-bcc2-d8db607ce26b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.438037 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kknd4\" (UniqueName: \"kubernetes.io/projected/9db10677-c084-4264-bcc2-d8db607ce26b-kube-api-access-kknd4\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.438074 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db10677-c084-4264-bcc2-d8db607ce26b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.540562 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db10677-c084-4264-bcc2-d8db607ce26b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.540691 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kknd4\" (UniqueName: \"kubernetes.io/projected/9db10677-c084-4264-bcc2-d8db607ce26b-kube-api-access-kknd4\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.541361 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db10677-c084-4264-bcc2-d8db607ce26b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.544814 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db10677-c084-4264-bcc2-d8db607ce26b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.546922 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db10677-c084-4264-bcc2-d8db607ce26b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.560660 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kknd4\" (UniqueName: \"kubernetes.io/projected/9db10677-c084-4264-bcc2-d8db607ce26b-kube-api-access-kknd4\") pod \"nova-cell0-conductor-0\" (UID: \"9db10677-c084-4264-bcc2-d8db607ce26b\") " pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:34 crc kubenswrapper[5002]: I1203 18:10:34.635318 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:35 crc kubenswrapper[5002]: I1203 18:10:35.139107 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 03 18:10:35 crc kubenswrapper[5002]: I1203 18:10:35.232696 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9db10677-c084-4264-bcc2-d8db607ce26b","Type":"ContainerStarted","Data":"bb7e5a1cc293b29a5e9b422ac8f9d5de898f7d93066839ade10a5c9b0e1529b2"}
Dec 03 18:10:36 crc kubenswrapper[5002]: I1203 18:10:36.241350 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9db10677-c084-4264-bcc2-d8db607ce26b","Type":"ContainerStarted","Data":"e16fea531bbf0c3dda638982f7e53af7e61d2b2948dedbf707af2e618a9e92fb"}
Dec 03 18:10:36 crc kubenswrapper[5002]: I1203 18:10:36.242392 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:36 crc kubenswrapper[5002]: I1203 18:10:36.266109 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.26608916 podStartE2EDuration="2.26608916s" podCreationTimestamp="2025-12-03 18:10:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:36.261849546 +0000 UTC m=+5959.675671444" watchObservedRunningTime="2025-12-03 18:10:36.26608916 +0000 UTC m=+5959.679911048"
Dec 03 18:10:44 crc kubenswrapper[5002]: I1203 18:10:44.681670 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.138314 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-xw92n"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.139659 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.144257 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.144365 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.150946 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xw92n"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.175395 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.175449 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-config-data\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.175582 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmq94\" (UniqueName: \"kubernetes.io/projected/954c519e-193b-486a-9b88-fc780ef7877b-kube-api-access-lmq94\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.175664 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-scripts\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.276825 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.276877 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-config-data\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.276976 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmq94\" (UniqueName: \"kubernetes.io/projected/954c519e-193b-486a-9b88-fc780ef7877b-kube-api-access-lmq94\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.277035 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-scripts\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.292395 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-scripts\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.293507 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-config-data\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.303030 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.319621 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmq94\" (UniqueName: \"kubernetes.io/projected/954c519e-193b-486a-9b88-fc780ef7877b-kube-api-access-lmq94\") pod \"nova-cell0-cell-mapping-xw92n\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.345244 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.373711 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.404761 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.404862 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dz49\" (UniqueName: \"kubernetes.io/projected/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-kube-api-access-4dz49\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.404888 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.431994 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.439121 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.475233 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xw92n"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.476344 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.490691 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.501618 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.506612 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dz49\" (UniqueName: \"kubernetes.io/projected/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-kube-api-access-4dz49\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.506870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.507868 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.526825 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.533847 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.534039 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.564563 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dz49\" (UniqueName: \"kubernetes.io/projected/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-kube-api-access-4dz49\") pod \"nova-cell1-novncproxy-0\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.572024 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.573707 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.575852 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611325 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-config-data\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611367 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6724n\" (UniqueName: \"kubernetes.io/projected/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-kube-api-access-6724n\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611423 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b512976e-6b0e-42b0-ad19-94faff1e0149-logs\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611441 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm5mw\" (UniqueName: \"kubernetes.io/projected/b512976e-6b0e-42b0-ad19-94faff1e0149-kube-api-access-xm5mw\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611525 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-logs\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611549 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611583 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-config-data\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.611600 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.612935 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.614409 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.630456 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.648815 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.688511 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714106 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-logs\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714162 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714189 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpxh4\" (UniqueName: \"kubernetes.io/projected/11897236-6398-4f41-a242-45dc4af738d0-kube-api-access-dpxh4\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714223 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-config-data\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714240 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714260 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-config-data\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714297 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-config-data\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714314 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6724n\" (UniqueName: \"kubernetes.io/projected/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-kube-api-access-6724n\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714342 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714375 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b512976e-6b0e-42b0-ad19-94faff1e0149-logs\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.714392 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm5mw\" (UniqueName: \"kubernetes.io/projected/b512976e-6b0e-42b0-ad19-94faff1e0149-kube-api-access-xm5mw\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.715177 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-logs\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.718841 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-config-data\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.719136 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b512976e-6b0e-42b0-ad19-94faff1e0149-logs\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.742819 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77df6d7ff9-l944g"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.743349 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.743956 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-config-data\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.744601 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.755169 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm5mw\" (UniqueName: \"kubernetes.io/projected/b512976e-6b0e-42b0-ad19-94faff1e0149-kube-api-access-xm5mw\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.756800 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.756866 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77df6d7ff9-l944g"]
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.760907 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.764616 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6724n\" (UniqueName: \"kubernetes.io/projected/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-kube-api-access-6724n\") pod \"nova-api-0\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " pod="openstack/nova-api-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.825973 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-nb\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826045 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpxh4\" (UniqueName: \"kubernetes.io/projected/11897236-6398-4f41-a242-45dc4af738d0-kube-api-access-dpxh4\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826087 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-config-data\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826109 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-dns-svc\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826128 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-sb\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826168 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-config\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826191 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.826215 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjq7c\" (UniqueName: \"kubernetes.io/projected/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-kube-api-access-wjq7c\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.829853 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-config-data\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.839375 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.856389 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpxh4\" (UniqueName: \"kubernetes.io/projected/11897236-6398-4f41-a242-45dc4af738d0-kube-api-access-dpxh4\") pod \"nova-scheduler-0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " pod="openstack/nova-scheduler-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.927888 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-nb\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.928198 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-dns-svc\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.928218 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-sb\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.928258 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-config\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.928290 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjq7c\" (UniqueName: \"kubernetes.io/projected/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-kube-api-access-wjq7c\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.929459 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-nb\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.929962 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-dns-svc\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.930501 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-sb\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.930931 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.936317 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-config\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:45 crc kubenswrapper[5002]: I1203 18:10:45.953190 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjq7c\" (UniqueName: \"kubernetes.io/projected/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-kube-api-access-wjq7c\") pod \"dnsmasq-dns-77df6d7ff9-l944g\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " pod="openstack/dnsmasq-dns-77df6d7ff9-l944g"
Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.077694 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.089876 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.099453 5002 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.369819 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xw92n"] Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.391726 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:46 crc kubenswrapper[5002]: W1203 18:10:46.407466 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb512976e_6b0e_42b0_ad19_94faff1e0149.slice/crio-ecac6f9be7c29d172df3e1471480acd990efea05d2d6615bc59ffd1799e782af WatchSource:0}: Error finding container ecac6f9be7c29d172df3e1471480acd990efea05d2d6615bc59ffd1799e782af: Status 404 returned error can't find the container with id ecac6f9be7c29d172df3e1471480acd990efea05d2d6615bc59ffd1799e782af Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.442668 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xw92n" event={"ID":"954c519e-193b-486a-9b88-fc780ef7877b","Type":"ContainerStarted","Data":"69660d3115dd682299ba024077193e2a410ef216fbb830677b4674416b6bb80c"} Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.448049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b512976e-6b0e-42b0-ad19-94faff1e0149","Type":"ContainerStarted","Data":"ecac6f9be7c29d172df3e1471480acd990efea05d2d6615bc59ffd1799e782af"} Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.470949 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.537587 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.669343 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sbg8b"] Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.670667 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.673554 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.674293 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.693211 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sbg8b"] Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.753460 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-config-data\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.753689 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn47c\" (UniqueName: \"kubernetes.io/projected/67230eb7-7694-4dd0-9f46-e05364abfc60-kube-api-access-nn47c\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.753891 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.754112 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-scripts\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.860145 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn47c\" (UniqueName: \"kubernetes.io/projected/67230eb7-7694-4dd0-9f46-e05364abfc60-kube-api-access-nn47c\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.860681 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.860759 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-scripts\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.860828 5002 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-config-data\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.865027 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-scripts\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.879577 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn47c\" (UniqueName: \"kubernetes.io/projected/67230eb7-7694-4dd0-9f46-e05364abfc60-kube-api-access-nn47c\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: W1203 18:10:46.879785 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b7e9b68_a2b3_40b1_9b3c_6d7e91978965.slice/crio-53b3e8a385e356989694594536d46b97a1c1d7728634fbad89dc6396a0167d4c WatchSource:0}: Error finding container 53b3e8a385e356989694594536d46b97a1c1d7728634fbad89dc6396a0167d4c: Status 404 returned error can't find the container with id 53b3e8a385e356989694594536d46b97a1c1d7728634fbad89dc6396a0167d4c Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.880326 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-config-data\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.889782 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-sbg8b\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.897492 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77df6d7ff9-l944g"] Dec 03 18:10:46 crc kubenswrapper[5002]: I1203 18:10:46.945169 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:10:46 crc kubenswrapper[5002]: W1203 18:10:46.951862 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11897236_6398_4f41_a242_45dc4af738d0.slice/crio-48a1289c53dd19d3ee8e870aa932073a553eadf6f6ab3b0d1c27abc9d4e5fe1b WatchSource:0}: Error finding container 48a1289c53dd19d3ee8e870aa932073a553eadf6f6ab3b0d1c27abc9d4e5fe1b: Status 404 returned error can't find the container with id 48a1289c53dd19d3ee8e870aa932073a553eadf6f6ab3b0d1c27abc9d4e5fe1b Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.014411 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.471411 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b512976e-6b0e-42b0-ad19-94faff1e0149","Type":"ContainerStarted","Data":"f4e0e79ff411a5a28b2f34c5a68664b522986f8c243496f20932842cdffa8a69"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.472111 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b512976e-6b0e-42b0-ad19-94faff1e0149","Type":"ContainerStarted","Data":"290313c4a5f34888faec297ca982fce8169e7565366b729b0a1366da2712ded8"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.475446 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83cb2af8-6b26-4e89-b9ed-38cb1ad77596","Type":"ContainerStarted","Data":"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.475506 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83cb2af8-6b26-4e89-b9ed-38cb1ad77596","Type":"ContainerStarted","Data":"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.475520 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83cb2af8-6b26-4e89-b9ed-38cb1ad77596","Type":"ContainerStarted","Data":"7091616ac1b6738860c9e90ffcdfd05ce1246701ec6fd08af7f7d5a11a17d6df"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.483571 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"11897236-6398-4f41-a242-45dc4af738d0","Type":"ContainerStarted","Data":"638964fc94eda4af2ea8e4b3cf7875950f0ff1a90288f7221d0b57ec7184eb57"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.484916 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"11897236-6398-4f41-a242-45dc4af738d0","Type":"ContainerStarted","Data":"48a1289c53dd19d3ee8e870aa932073a553eadf6f6ab3b0d1c27abc9d4e5fe1b"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.491238 5002 generic.go:334] "Generic (PLEG): container finished" podID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerID="a637488e4973b57fca4db7fe782eb9c62fb70f159d75cbdb21e2124d3c8b10fd" exitCode=0 Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.491341 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" event={"ID":"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965","Type":"ContainerDied","Data":"a637488e4973b57fca4db7fe782eb9c62fb70f159d75cbdb21e2124d3c8b10fd"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.491368 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" event={"ID":"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965","Type":"ContainerStarted","Data":"53b3e8a385e356989694594536d46b97a1c1d7728634fbad89dc6396a0167d4c"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.491603 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.4915851829999998 podStartE2EDuration="2.491585183s" podCreationTimestamp="2025-12-03 18:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:47.489539458 +0000 UTC m=+5970.903361356" 
watchObservedRunningTime="2025-12-03 18:10:47.491585183 +0000 UTC m=+5970.905407071" Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.520564 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xw92n" event={"ID":"954c519e-193b-486a-9b88-fc780ef7877b","Type":"ContainerStarted","Data":"37069fe5a03d001c1bbc4d0afec937eaaea8661e9b1ef1a2a1d7af1c7aef6606"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.530406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf","Type":"ContainerStarted","Data":"8a87e07158ff1e55530706f83e84fb0ed60deea850921a5745f565de29165f0d"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.530488 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf","Type":"ContainerStarted","Data":"2264ba429db991d786f42e4552ac54e8d981e96b7ebce74c0e1a8686f4280764"} Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.539276 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.539254465 podStartE2EDuration="2.539254465s" podCreationTimestamp="2025-12-03 18:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:47.516534514 +0000 UTC m=+5970.930356402" watchObservedRunningTime="2025-12-03 18:10:47.539254465 +0000 UTC m=+5970.953076363" Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.587248 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.587229066 podStartE2EDuration="2.587229066s" podCreationTimestamp="2025-12-03 18:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:47.53794791 +0000 UTC m=+5970.951769808" watchObservedRunningTime="2025-12-03 18:10:47.587229066 +0000 UTC m=+5971.001050954" Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.612080 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-xw92n" podStartSLOduration=2.612061154 podStartE2EDuration="2.612061154s" podCreationTimestamp="2025-12-03 18:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:47.582040107 +0000 UTC m=+5970.995861995" watchObservedRunningTime="2025-12-03 18:10:47.612061154 +0000 UTC m=+5971.025883042" Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.629812 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sbg8b"] Dec 03 18:10:47 crc kubenswrapper[5002]: I1203 18:10:47.635375 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.635355261 podStartE2EDuration="2.635355261s" podCreationTimestamp="2025-12-03 18:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:47.614165901 +0000 UTC m=+5971.027987789" watchObservedRunningTime="2025-12-03 18:10:47.635355261 +0000 UTC m=+5971.049177149" Dec 03 18:10:48 crc kubenswrapper[5002]: I1203 18:10:48.543463 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" event={"ID":"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965","Type":"ContainerStarted","Data":"de5a4bbe1a6a67967de86ec2b7070676b671e9d6acbd35930c0a654261bbf5f4"} Dec 03 18:10:48 crc kubenswrapper[5002]: I1203 18:10:48.543768 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" Dec 03 18:10:48 crc kubenswrapper[5002]: I1203 18:10:48.545406 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" event={"ID":"67230eb7-7694-4dd0-9f46-e05364abfc60","Type":"ContainerStarted","Data":"d007cc7fa615dddf284df24864c0e4f352d7a2864995f64799a61502b1eb6ac7"} Dec 03 18:10:48 crc kubenswrapper[5002]: I1203 18:10:48.545443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" event={"ID":"67230eb7-7694-4dd0-9f46-e05364abfc60","Type":"ContainerStarted","Data":"d9b64aba8db0fd6bc6094c1479c54c85ffcb54e031fd4f8ddbe96d974cd174e6"} Dec 03 18:10:48 crc kubenswrapper[5002]: I1203 18:10:48.565013 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" podStartSLOduration=3.564987964 podStartE2EDuration="3.564987964s" podCreationTimestamp="2025-12-03 18:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:48.561557211 +0000 UTC m=+5971.975379149" watchObservedRunningTime="2025-12-03 18:10:48.564987964 +0000 UTC m=+5971.978809852" Dec 03 18:10:48 crc kubenswrapper[5002]: I1203 18:10:48.606843 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" podStartSLOduration=2.60681979 podStartE2EDuration="2.60681979s" podCreationTimestamp="2025-12-03 18:10:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:48.579055043 +0000 UTC m=+5971.992876931" watchObservedRunningTime="2025-12-03 18:10:48.60681979 +0000 UTC m=+5972.020641678" Dec 03 18:10:49 crc kubenswrapper[5002]: I1203 18:10:49.777131 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:49 crc kubenswrapper[5002]: I1203 18:10:49.777668 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-log" containerID="cri-o://290313c4a5f34888faec297ca982fce8169e7565366b729b0a1366da2712ded8" gracePeriod=30 Dec 03 18:10:49 crc kubenswrapper[5002]: I1203 18:10:49.781283 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-metadata" containerID="cri-o://f4e0e79ff411a5a28b2f34c5a68664b522986f8c243496f20932842cdffa8a69" gracePeriod=30 Dec 03 18:10:49 crc kubenswrapper[5002]: I1203 18:10:49.834932 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:49 crc kubenswrapper[5002]: I1203 18:10:49.835120 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://8a87e07158ff1e55530706f83e84fb0ed60deea850921a5745f565de29165f0d" gracePeriod=30 
Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.579925 5002 generic.go:334] "Generic (PLEG): container finished" podID="e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" containerID="8a87e07158ff1e55530706f83e84fb0ed60deea850921a5745f565de29165f0d" exitCode=0 Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.580068 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf","Type":"ContainerDied","Data":"8a87e07158ff1e55530706f83e84fb0ed60deea850921a5745f565de29165f0d"} Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.587172 5002 generic.go:334] "Generic (PLEG): container finished" podID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerID="f4e0e79ff411a5a28b2f34c5a68664b522986f8c243496f20932842cdffa8a69" exitCode=0 Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.587193 5002 generic.go:334] "Generic (PLEG): container finished" podID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerID="290313c4a5f34888faec297ca982fce8169e7565366b729b0a1366da2712ded8" exitCode=143 Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.587211 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b512976e-6b0e-42b0-ad19-94faff1e0149","Type":"ContainerDied","Data":"f4e0e79ff411a5a28b2f34c5a68664b522986f8c243496f20932842cdffa8a69"} Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.587234 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b512976e-6b0e-42b0-ad19-94faff1e0149","Type":"ContainerDied","Data":"290313c4a5f34888faec297ca982fce8169e7565366b729b0a1366da2712ded8"} Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.685515 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.695234 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.845702 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dz49\" (UniqueName: \"kubernetes.io/projected/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-kube-api-access-4dz49\") pod \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.845902 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm5mw\" (UniqueName: \"kubernetes.io/projected/b512976e-6b0e-42b0-ad19-94faff1e0149-kube-api-access-xm5mw\") pod \"b512976e-6b0e-42b0-ad19-94faff1e0149\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.845932 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-config-data\") pod \"b512976e-6b0e-42b0-ad19-94faff1e0149\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.846000 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-config-data\") pod \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.846215 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-combined-ca-bundle\") pod \"b512976e-6b0e-42b0-ad19-94faff1e0149\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.846239 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-combined-ca-bundle\") pod \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\" (UID: \"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.846775 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b512976e-6b0e-42b0-ad19-94faff1e0149-logs\") pod \"b512976e-6b0e-42b0-ad19-94faff1e0149\" (UID: \"b512976e-6b0e-42b0-ad19-94faff1e0149\") " Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.847583 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b512976e-6b0e-42b0-ad19-94faff1e0149-logs" (OuterVolumeSpecName: "logs") pod "b512976e-6b0e-42b0-ad19-94faff1e0149" (UID: "b512976e-6b0e-42b0-ad19-94faff1e0149"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.852894 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-kube-api-access-4dz49" (OuterVolumeSpecName: "kube-api-access-4dz49") pod "e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" (UID: "e5293971-dcb4-4cb2-9a98-a4146d2cb8cf"). InnerVolumeSpecName "kube-api-access-4dz49". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.853941 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b512976e-6b0e-42b0-ad19-94faff1e0149-kube-api-access-xm5mw" (OuterVolumeSpecName: "kube-api-access-xm5mw") pod "b512976e-6b0e-42b0-ad19-94faff1e0149" (UID: "b512976e-6b0e-42b0-ad19-94faff1e0149"). InnerVolumeSpecName "kube-api-access-xm5mw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.875872 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-config-data" (OuterVolumeSpecName: "config-data") pod "b512976e-6b0e-42b0-ad19-94faff1e0149" (UID: "b512976e-6b0e-42b0-ad19-94faff1e0149"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.883070 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b512976e-6b0e-42b0-ad19-94faff1e0149" (UID: "b512976e-6b0e-42b0-ad19-94faff1e0149"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.898023 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" (UID: "e5293971-dcb4-4cb2-9a98-a4146d2cb8cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.908607 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-config-data" (OuterVolumeSpecName: "config-data") pod "e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" (UID: "e5293971-dcb4-4cb2-9a98-a4146d2cb8cf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.916260 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.916318 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949097 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949141 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949156 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b512976e-6b0e-42b0-ad19-94faff1e0149-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949172 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dz49\" (UniqueName: \"kubernetes.io/projected/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-kube-api-access-4dz49\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949187 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm5mw\" (UniqueName: \"kubernetes.io/projected/b512976e-6b0e-42b0-ad19-94faff1e0149-kube-api-access-xm5mw\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949200 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b512976e-6b0e-42b0-ad19-94faff1e0149-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:50 crc kubenswrapper[5002]: I1203 18:10:50.949214 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.090975 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.597082 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b512976e-6b0e-42b0-ad19-94faff1e0149","Type":"ContainerDied","Data":"ecac6f9be7c29d172df3e1471480acd990efea05d2d6615bc59ffd1799e782af"} Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.597160 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.597352 5002 scope.go:117] "RemoveContainer" containerID="f4e0e79ff411a5a28b2f34c5a68664b522986f8c243496f20932842cdffa8a69" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.599018 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e5293971-dcb4-4cb2-9a98-a4146d2cb8cf","Type":"ContainerDied","Data":"2264ba429db991d786f42e4552ac54e8d981e96b7ebce74c0e1a8686f4280764"} Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.599035 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.603354 5002 generic.go:334] "Generic (PLEG): container finished" podID="67230eb7-7694-4dd0-9f46-e05364abfc60" containerID="d007cc7fa615dddf284df24864c0e4f352d7a2864995f64799a61502b1eb6ac7" exitCode=0 Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.603429 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" event={"ID":"67230eb7-7694-4dd0-9f46-e05364abfc60","Type":"ContainerDied","Data":"d007cc7fa615dddf284df24864c0e4f352d7a2864995f64799a61502b1eb6ac7"} Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.651085 5002 scope.go:117] "RemoveContainer" containerID="290313c4a5f34888faec297ca982fce8169e7565366b729b0a1366da2712ded8" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.660817 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.689912 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.703410 5002 scope.go:117] "RemoveContainer" containerID="8a87e07158ff1e55530706f83e84fb0ed60deea850921a5745f565de29165f0d" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.711494 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.719100 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.757838 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: E1203 18:10:51.759355 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.759433 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 18:10:51 crc kubenswrapper[5002]: E1203 18:10:51.759472 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-metadata" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.759480 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-metadata" Dec 03 18:10:51 crc kubenswrapper[5002]: E1203 18:10:51.759512 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-log" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.759525 5002 
state_mem.go:107] "Deleted CPUSet assignment" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-log" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.759814 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-log" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.759829 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" containerName="nova-metadata-metadata" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.759847 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.761054 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.764912 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.765007 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.772410 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.789525 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.791992 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.796936 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.798437 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.800345 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.813526 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.860038 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vdgvz"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.867265 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.870853 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z79ms\" (UniqueName: \"kubernetes.io/projected/a15b6552-1779-4bb6-88fb-bc77c4eed74e-kube-api-access-z79ms\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.870960 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.870998 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.871062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.871252 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.887986 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdgvz"] Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.973974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974037 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z79ms\" (UniqueName: \"kubernetes.io/projected/a15b6552-1779-4bb6-88fb-bc77c4eed74e-kube-api-access-z79ms\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974071 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41e7f4a9-9a5c-495d-bee8-775645ce1603-logs\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974097 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vr6x\" (UniqueName: \"kubernetes.io/projected/41e7f4a9-9a5c-495d-bee8-775645ce1603-kube-api-access-7vr6x\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974280 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-utilities\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974362 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-config-data\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974442 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974487 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974557 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974632 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.974681 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.979674 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.977935 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-catalog-content\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.979949 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frjcp\" (UniqueName: \"kubernetes.io/projected/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-kube-api-access-frjcp\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.981098 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.990667 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.990679 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a15b6552-1779-4bb6-88fb-bc77c4eed74e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:51 crc kubenswrapper[5002]: I1203 18:10:51.999453 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z79ms\" (UniqueName: \"kubernetes.io/projected/a15b6552-1779-4bb6-88fb-bc77c4eed74e-kube-api-access-z79ms\") pod \"nova-cell1-novncproxy-0\" (UID: \"a15b6552-1779-4bb6-88fb-bc77c4eed74e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081623 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41e7f4a9-9a5c-495d-bee8-775645ce1603-logs\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081687 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vr6x\" (UniqueName: \"kubernetes.io/projected/41e7f4a9-9a5c-495d-bee8-775645ce1603-kube-api-access-7vr6x\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081736 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-utilities\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081783 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-config-data\") pod 
\"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081842 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081888 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081953 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-catalog-content\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.081974 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frjcp\" (UniqueName: \"kubernetes.io/projected/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-kube-api-access-frjcp\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.082405 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-utilities\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.082407 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41e7f4a9-9a5c-495d-bee8-775645ce1603-logs\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.082825 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-catalog-content\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.086134 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-config-data\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.086324 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.088308 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.090371 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.105261 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vr6x\" (UniqueName: \"kubernetes.io/projected/41e7f4a9-9a5c-495d-bee8-775645ce1603-kube-api-access-7vr6x\") pod \"nova-metadata-0\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.109563 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frjcp\" (UniqueName: \"kubernetes.io/projected/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-kube-api-access-frjcp\") pod \"redhat-marketplace-vdgvz\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.117407 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.200642 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.562211 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.620711 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a15b6552-1779-4bb6-88fb-bc77c4eed74e","Type":"ContainerStarted","Data":"c026e34f884d2eb74c7c6695129dc9159b2f2df73aaef078a3a1de4449f3293c"} Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.622494 5002 generic.go:334] "Generic (PLEG): container finished" podID="954c519e-193b-486a-9b88-fc780ef7877b" containerID="37069fe5a03d001c1bbc4d0afec937eaaea8661e9b1ef1a2a1d7af1c7aef6606" exitCode=0 Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.622741 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xw92n" event={"ID":"954c519e-193b-486a-9b88-fc780ef7877b","Type":"ContainerDied","Data":"37069fe5a03d001c1bbc4d0afec937eaaea8661e9b1ef1a2a1d7af1c7aef6606"} Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.698950 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:52 crc kubenswrapper[5002]: W1203 18:10:52.708710 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41e7f4a9_9a5c_495d_bee8_775645ce1603.slice/crio-c4742e1cf54fb23b440b9e13d225728656c38cb153a298c9368e570c4bb65738 WatchSource:0}: Error finding container c4742e1cf54fb23b440b9e13d225728656c38cb153a298c9368e570c4bb65738: Status 404 returned error can't find the container with id c4742e1cf54fb23b440b9e13d225728656c38cb153a298c9368e570c4bb65738 Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.835481 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdgvz"] Dec 03 18:10:52 crc kubenswrapper[5002]: W1203 18:10:52.844051 5002 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e9b10fe_923e_48fd_85e3_2b1e4e8e1275.slice/crio-9ec5292f5b65e16fe89252394d075f65bd902c4c9e05eeb503ddebd954515b7d WatchSource:0}: Error finding container 9ec5292f5b65e16fe89252394d075f65bd902c4c9e05eeb503ddebd954515b7d: Status 404 returned error can't find the container with id 9ec5292f5b65e16fe89252394d075f65bd902c4c9e05eeb503ddebd954515b7d Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.857439 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b512976e-6b0e-42b0-ad19-94faff1e0149" path="/var/lib/kubelet/pods/b512976e-6b0e-42b0-ad19-94faff1e0149/volumes" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.858174 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5293971-dcb4-4cb2-9a98-a4146d2cb8cf" path="/var/lib/kubelet/pods/e5293971-dcb4-4cb2-9a98-a4146d2cb8cf/volumes" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.897171 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.907717 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-scripts\") pod \"67230eb7-7694-4dd0-9f46-e05364abfc60\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.907831 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-combined-ca-bundle\") pod \"67230eb7-7694-4dd0-9f46-e05364abfc60\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.907935 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn47c\" (UniqueName: \"kubernetes.io/projected/67230eb7-7694-4dd0-9f46-e05364abfc60-kube-api-access-nn47c\") pod \"67230eb7-7694-4dd0-9f46-e05364abfc60\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.908392 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-config-data\") pod \"67230eb7-7694-4dd0-9f46-e05364abfc60\" (UID: \"67230eb7-7694-4dd0-9f46-e05364abfc60\") " Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.914353 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67230eb7-7694-4dd0-9f46-e05364abfc60-kube-api-access-nn47c" (OuterVolumeSpecName: "kube-api-access-nn47c") pod "67230eb7-7694-4dd0-9f46-e05364abfc60" (UID: "67230eb7-7694-4dd0-9f46-e05364abfc60"). InnerVolumeSpecName "kube-api-access-nn47c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.917281 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-scripts" (OuterVolumeSpecName: "scripts") pod "67230eb7-7694-4dd0-9f46-e05364abfc60" (UID: "67230eb7-7694-4dd0-9f46-e05364abfc60"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.954861 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67230eb7-7694-4dd0-9f46-e05364abfc60" (UID: "67230eb7-7694-4dd0-9f46-e05364abfc60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:52 crc kubenswrapper[5002]: I1203 18:10:52.963069 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-config-data" (OuterVolumeSpecName: "config-data") pod "67230eb7-7694-4dd0-9f46-e05364abfc60" (UID: "67230eb7-7694-4dd0-9f46-e05364abfc60"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.010493 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.010523 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn47c\" (UniqueName: \"kubernetes.io/projected/67230eb7-7694-4dd0-9f46-e05364abfc60-kube-api-access-nn47c\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.010533 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.010541 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67230eb7-7694-4dd0-9f46-e05364abfc60-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.640738 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" event={"ID":"67230eb7-7694-4dd0-9f46-e05364abfc60","Type":"ContainerDied","Data":"d9b64aba8db0fd6bc6094c1479c54c85ffcb54e031fd4f8ddbe96d974cd174e6"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.641166 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9b64aba8db0fd6bc6094c1479c54c85ffcb54e031fd4f8ddbe96d974cd174e6" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.641044 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-sbg8b" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.646703 5002 generic.go:334] "Generic (PLEG): container finished" podID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerID="739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91" exitCode=0 Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.646844 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdgvz" event={"ID":"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275","Type":"ContainerDied","Data":"739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.646879 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdgvz" event={"ID":"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275","Type":"ContainerStarted","Data":"9ec5292f5b65e16fe89252394d075f65bd902c4c9e05eeb503ddebd954515b7d"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.649843 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a15b6552-1779-4bb6-88fb-bc77c4eed74e","Type":"ContainerStarted","Data":"6a2f2b8545421024be020fc954b94dd2ffeb5f3de524f828f9f93df757168fab"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.650114 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.675311 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41e7f4a9-9a5c-495d-bee8-775645ce1603","Type":"ContainerStarted","Data":"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.675364 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41e7f4a9-9a5c-495d-bee8-775645ce1603","Type":"ContainerStarted","Data":"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.675377 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41e7f4a9-9a5c-495d-bee8-775645ce1603","Type":"ContainerStarted","Data":"c4742e1cf54fb23b440b9e13d225728656c38cb153a298c9368e570c4bb65738"} Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.710988 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.7109703720000002 podStartE2EDuration="2.710970372s" podCreationTimestamp="2025-12-03 18:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:53.698130817 +0000 UTC m=+5977.111952705" watchObservedRunningTime="2025-12-03 18:10:53.710970372 +0000 UTC m=+5977.124792250" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.716974 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 18:10:53 crc kubenswrapper[5002]: E1203 18:10:53.717886 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67230eb7-7694-4dd0-9f46-e05364abfc60" containerName="nova-cell1-conductor-db-sync" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.717911 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="67230eb7-7694-4dd0-9f46-e05364abfc60" containerName="nova-cell1-conductor-db-sync" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 
18:10:53.718175 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="67230eb7-7694-4dd0-9f46-e05364abfc60" containerName="nova-cell1-conductor-db-sync" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.725348 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.726311 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.732656 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.759325 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.759307153 podStartE2EDuration="2.759307153s" podCreationTimestamp="2025-12-03 18:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:53.731168805 +0000 UTC m=+5977.144990693" watchObservedRunningTime="2025-12-03 18:10:53.759307153 +0000 UTC m=+5977.173129041" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.825964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e2c7b8-f8f4-48c8-a384-66c38910868a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.826259 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf7jp\" (UniqueName: \"kubernetes.io/projected/67e2c7b8-f8f4-48c8-a384-66c38910868a-kube-api-access-rf7jp\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.826688 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e2c7b8-f8f4-48c8-a384-66c38910868a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.928568 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e2c7b8-f8f4-48c8-a384-66c38910868a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.928670 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf7jp\" (UniqueName: \"kubernetes.io/projected/67e2c7b8-f8f4-48c8-a384-66c38910868a-kube-api-access-rf7jp\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.928734 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e2c7b8-f8f4-48c8-a384-66c38910868a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " 
pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.935599 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e2c7b8-f8f4-48c8-a384-66c38910868a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.943408 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e2c7b8-f8f4-48c8-a384-66c38910868a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:53 crc kubenswrapper[5002]: I1203 18:10:53.947178 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf7jp\" (UniqueName: \"kubernetes.io/projected/67e2c7b8-f8f4-48c8-a384-66c38910868a-kube-api-access-rf7jp\") pod \"nova-cell1-conductor-0\" (UID: \"67e2c7b8-f8f4-48c8-a384-66c38910868a\") " pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.058535 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.168905 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xw92n" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.234132 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmq94\" (UniqueName: \"kubernetes.io/projected/954c519e-193b-486a-9b88-fc780ef7877b-kube-api-access-lmq94\") pod \"954c519e-193b-486a-9b88-fc780ef7877b\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.234308 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-combined-ca-bundle\") pod \"954c519e-193b-486a-9b88-fc780ef7877b\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.234363 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-config-data\") pod \"954c519e-193b-486a-9b88-fc780ef7877b\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.234543 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-scripts\") pod \"954c519e-193b-486a-9b88-fc780ef7877b\" (UID: \"954c519e-193b-486a-9b88-fc780ef7877b\") " Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.239979 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-scripts" (OuterVolumeSpecName: "scripts") pod "954c519e-193b-486a-9b88-fc780ef7877b" (UID: "954c519e-193b-486a-9b88-fc780ef7877b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.240932 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/954c519e-193b-486a-9b88-fc780ef7877b-kube-api-access-lmq94" (OuterVolumeSpecName: "kube-api-access-lmq94") pod "954c519e-193b-486a-9b88-fc780ef7877b" (UID: "954c519e-193b-486a-9b88-fc780ef7877b"). InnerVolumeSpecName "kube-api-access-lmq94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.266137 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-config-data" (OuterVolumeSpecName: "config-data") pod "954c519e-193b-486a-9b88-fc780ef7877b" (UID: "954c519e-193b-486a-9b88-fc780ef7877b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.288105 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "954c519e-193b-486a-9b88-fc780ef7877b" (UID: "954c519e-193b-486a-9b88-fc780ef7877b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.338377 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.338417 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.338426 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954c519e-193b-486a-9b88-fc780ef7877b-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.338434 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmq94\" (UniqueName: \"kubernetes.io/projected/954c519e-193b-486a-9b88-fc780ef7877b-kube-api-access-lmq94\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.517831 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.694868 5002 generic.go:334] "Generic (PLEG): container finished" podID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerID="8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b" exitCode=0 Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.694983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdgvz" event={"ID":"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275","Type":"ContainerDied","Data":"8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b"} Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.701842 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"67e2c7b8-f8f4-48c8-a384-66c38910868a","Type":"ContainerStarted","Data":"7aa2e1d3bff8adaa777f1330a810bfdc70fb6321b095e3ab5d2a7a55663ca4ee"} Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 
18:10:54.706247 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xw92n" event={"ID":"954c519e-193b-486a-9b88-fc780ef7877b","Type":"ContainerDied","Data":"69660d3115dd682299ba024077193e2a410ef216fbb830677b4674416b6bb80c"} Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.706299 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69660d3115dd682299ba024077193e2a410ef216fbb830677b4674416b6bb80c" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.707738 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xw92n" Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.850401 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.850619 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-log" containerID="cri-o://ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04" gracePeriod=30 Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.851045 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-api" containerID="cri-o://e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2" gracePeriod=30 Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.867412 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.867672 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="11897236-6398-4f41-a242-45dc4af738d0" containerName="nova-scheduler-scheduler" containerID="cri-o://638964fc94eda4af2ea8e4b3cf7875950f0ff1a90288f7221d0b57ec7184eb57" gracePeriod=30 Dec 03 18:10:54 crc kubenswrapper[5002]: I1203 18:10:54.876239 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.436157 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.480646 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-config-data\") pod \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.480972 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-logs\") pod \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.481038 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-combined-ca-bundle\") pod \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.481150 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6724n\" (UniqueName: \"kubernetes.io/projected/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-kube-api-access-6724n\") pod \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\" (UID: \"83cb2af8-6b26-4e89-b9ed-38cb1ad77596\") " Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.481354 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-logs" (OuterVolumeSpecName: "logs") pod "83cb2af8-6b26-4e89-b9ed-38cb1ad77596" (UID: "83cb2af8-6b26-4e89-b9ed-38cb1ad77596"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.482013 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.486644 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-kube-api-access-6724n" (OuterVolumeSpecName: "kube-api-access-6724n") pod "83cb2af8-6b26-4e89-b9ed-38cb1ad77596" (UID: "83cb2af8-6b26-4e89-b9ed-38cb1ad77596"). InnerVolumeSpecName "kube-api-access-6724n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.512042 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83cb2af8-6b26-4e89-b9ed-38cb1ad77596" (UID: "83cb2af8-6b26-4e89-b9ed-38cb1ad77596"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.528172 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-config-data" (OuterVolumeSpecName: "config-data") pod "83cb2af8-6b26-4e89-b9ed-38cb1ad77596" (UID: "83cb2af8-6b26-4e89-b9ed-38cb1ad77596"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.583616 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.583651 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6724n\" (UniqueName: \"kubernetes.io/projected/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-kube-api-access-6724n\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.583661 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83cb2af8-6b26-4e89-b9ed-38cb1ad77596-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717353 5002 generic.go:334] "Generic (PLEG): container finished" podID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerID="e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2" exitCode=0 Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717388 5002 generic.go:334] "Generic (PLEG): container finished" podID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerID="ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04" exitCode=143 Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717437 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83cb2af8-6b26-4e89-b9ed-38cb1ad77596","Type":"ContainerDied","Data":"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2"} Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717464 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83cb2af8-6b26-4e89-b9ed-38cb1ad77596","Type":"ContainerDied","Data":"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04"} Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717475 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83cb2af8-6b26-4e89-b9ed-38cb1ad77596","Type":"ContainerDied","Data":"7091616ac1b6738860c9e90ffcdfd05ce1246701ec6fd08af7f7d5a11a17d6df"} Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717473 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.717490 5002 scope.go:117] "RemoveContainer" containerID="e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.724812 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdgvz" event={"ID":"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275","Type":"ContainerStarted","Data":"4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb"} Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.727769 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-log" containerID="cri-o://a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5" gracePeriod=30 Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.728728 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"67e2c7b8-f8f4-48c8-a384-66c38910868a","Type":"ContainerStarted","Data":"91c94f0d9bf01d19a3dc702785f5340351b69f708d0ee72d1bfde8068c8e995b"} Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.728777 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.728824 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-metadata" containerID="cri-o://d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f" gracePeriod=30 Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.750205 5002 scope.go:117] "RemoveContainer" containerID="ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.753888 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vdgvz" podStartSLOduration=3.267783103 podStartE2EDuration="4.753868348s" podCreationTimestamp="2025-12-03 18:10:51 +0000 UTC" firstStartedPulling="2025-12-03 18:10:53.649779365 +0000 UTC m=+5977.063601273" lastFinishedPulling="2025-12-03 18:10:55.13586463 +0000 UTC m=+5978.549686518" observedRunningTime="2025-12-03 18:10:55.748239567 +0000 UTC m=+5979.162061475" watchObservedRunningTime="2025-12-03 18:10:55.753868348 +0000 UTC m=+5979.167690226" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.769940 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.76991771 podStartE2EDuration="2.76991771s" podCreationTimestamp="2025-12-03 18:10:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:55.765182322 +0000 UTC m=+5979.179004210" watchObservedRunningTime="2025-12-03 18:10:55.76991771 +0000 UTC m=+5979.183739598" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.782837 5002 scope.go:117] "RemoveContainer" containerID="e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2" Dec 03 18:10:55 crc kubenswrapper[5002]: E1203 18:10:55.783449 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2\": 
container with ID starting with e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2 not found: ID does not exist" containerID="e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.783491 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2"} err="failed to get container status \"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2\": rpc error: code = NotFound desc = could not find container \"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2\": container with ID starting with e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2 not found: ID does not exist" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.783516 5002 scope.go:117] "RemoveContainer" containerID="ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04" Dec 03 18:10:55 crc kubenswrapper[5002]: E1203 18:10:55.784358 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04\": container with ID starting with ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04 not found: ID does not exist" containerID="ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.784404 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04"} err="failed to get container status \"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04\": rpc error: code = NotFound desc = could not find container \"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04\": container with ID starting with ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04 not found: ID does not exist" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.784433 5002 scope.go:117] "RemoveContainer" containerID="e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.784946 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2"} err="failed to get container status \"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2\": rpc error: code = NotFound desc = could not find container \"e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2\": container with ID starting with e01d85f4a0cf9754d9537ff0c3f17fa7a1be94f25ffa7b26c2d3fe63292847a2 not found: ID does not exist" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.784967 5002 scope.go:117] "RemoveContainer" containerID="ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.785969 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04"} err="failed to get container status \"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04\": rpc error: code = NotFound desc = could not find container \"ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04\": container with ID starting with ba46ec9be91a00adbd509a098b536f46af36aad803388d39830a92386a484c04 not 
found: ID does not exist" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.797705 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.813226 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839104 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:55 crc kubenswrapper[5002]: E1203 18:10:55.839528 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-log" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839543 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-log" Dec 03 18:10:55 crc kubenswrapper[5002]: E1203 18:10:55.839565 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-api" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839573 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-api" Dec 03 18:10:55 crc kubenswrapper[5002]: E1203 18:10:55.839593 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="954c519e-193b-486a-9b88-fc780ef7877b" containerName="nova-manage" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839601 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="954c519e-193b-486a-9b88-fc780ef7877b" containerName="nova-manage" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839798 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-api" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839814 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" containerName="nova-api-log" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.839823 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="954c519e-193b-486a-9b88-fc780ef7877b" containerName="nova-manage" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.840849 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.841608 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.851798 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.888395 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.888516 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqf67\" (UniqueName: \"kubernetes.io/projected/99c190da-fa41-41c0-a9dd-2e795133f7b6-kube-api-access-nqf67\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.888586 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99c190da-fa41-41c0-a9dd-2e795133f7b6-logs\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.888647 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-config-data\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.990778 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.991121 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqf67\" (UniqueName: \"kubernetes.io/projected/99c190da-fa41-41c0-a9dd-2e795133f7b6-kube-api-access-nqf67\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.991162 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99c190da-fa41-41c0-a9dd-2e795133f7b6-logs\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.991188 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-config-data\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.992527 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99c190da-fa41-41c0-a9dd-2e795133f7b6-logs\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.996237 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-config-data\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:55 crc kubenswrapper[5002]: I1203 18:10:55.996710 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.010221 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqf67\" (UniqueName: \"kubernetes.io/projected/99c190da-fa41-41c0-a9dd-2e795133f7b6-kube-api-access-nqf67\") pod \"nova-api-0\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " pod="openstack/nova-api-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.103899 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.171360 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.199357 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79966dc9f5-bw22t"] Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.199645 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" podUID="af217565-3928-470d-a546-1a2706a76ad8" containerName="dnsmasq-dns" containerID="cri-o://60c2fb83848f3651194be7012820b2fdbdfbce4265a073c8d292082cb7df9bd6" gracePeriod=10 Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.500639 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.616089 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-config-data\") pod \"41e7f4a9-9a5c-495d-bee8-775645ce1603\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.616138 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vr6x\" (UniqueName: \"kubernetes.io/projected/41e7f4a9-9a5c-495d-bee8-775645ce1603-kube-api-access-7vr6x\") pod \"41e7f4a9-9a5c-495d-bee8-775645ce1603\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.616181 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41e7f4a9-9a5c-495d-bee8-775645ce1603-logs\") pod \"41e7f4a9-9a5c-495d-bee8-775645ce1603\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.616218 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-combined-ca-bundle\") pod \"41e7f4a9-9a5c-495d-bee8-775645ce1603\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.616306 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-nova-metadata-tls-certs\") pod \"41e7f4a9-9a5c-495d-bee8-775645ce1603\" (UID: \"41e7f4a9-9a5c-495d-bee8-775645ce1603\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.617567 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41e7f4a9-9a5c-495d-bee8-775645ce1603-logs" (OuterVolumeSpecName: "logs") pod "41e7f4a9-9a5c-495d-bee8-775645ce1603" (UID: "41e7f4a9-9a5c-495d-bee8-775645ce1603"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.627110 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41e7f4a9-9a5c-495d-bee8-775645ce1603-kube-api-access-7vr6x" (OuterVolumeSpecName: "kube-api-access-7vr6x") pod "41e7f4a9-9a5c-495d-bee8-775645ce1603" (UID: "41e7f4a9-9a5c-495d-bee8-775645ce1603"). InnerVolumeSpecName "kube-api-access-7vr6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.672555 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-config-data" (OuterVolumeSpecName: "config-data") pod "41e7f4a9-9a5c-495d-bee8-775645ce1603" (UID: "41e7f4a9-9a5c-495d-bee8-775645ce1603"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.691479 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41e7f4a9-9a5c-495d-bee8-775645ce1603" (UID: "41e7f4a9-9a5c-495d-bee8-775645ce1603"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.719280 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.719399 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vr6x\" (UniqueName: \"kubernetes.io/projected/41e7f4a9-9a5c-495d-bee8-775645ce1603-kube-api-access-7vr6x\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.719415 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41e7f4a9-9a5c-495d-bee8-775645ce1603-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.719426 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.754155 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "41e7f4a9-9a5c-495d-bee8-775645ce1603" (UID: "41e7f4a9-9a5c-495d-bee8-775645ce1603"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762692 5002 generic.go:334] "Generic (PLEG): container finished" podID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerID="d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f" exitCode=0 Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762727 5002 generic.go:334] "Generic (PLEG): container finished" podID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerID="a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5" exitCode=143 Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762742 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41e7f4a9-9a5c-495d-bee8-775645ce1603","Type":"ContainerDied","Data":"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f"} Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762844 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41e7f4a9-9a5c-495d-bee8-775645ce1603","Type":"ContainerDied","Data":"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5"} Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762860 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41e7f4a9-9a5c-495d-bee8-775645ce1603","Type":"ContainerDied","Data":"c4742e1cf54fb23b440b9e13d225728656c38cb153a298c9368e570c4bb65738"} Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762878 5002 scope.go:117] "RemoveContainer" containerID="d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.762802 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.775277 5002 generic.go:334] "Generic (PLEG): container finished" podID="af217565-3928-470d-a546-1a2706a76ad8" containerID="60c2fb83848f3651194be7012820b2fdbdfbce4265a073c8d292082cb7df9bd6" exitCode=0 Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.775331 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" event={"ID":"af217565-3928-470d-a546-1a2706a76ad8","Type":"ContainerDied","Data":"60c2fb83848f3651194be7012820b2fdbdfbce4265a073c8d292082cb7df9bd6"} Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.810864 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.811041 5002 scope.go:117] "RemoveContainer" containerID="a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.821148 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41e7f4a9-9a5c-495d-bee8-775645ce1603-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.830056 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.862722 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.863944 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83cb2af8-6b26-4e89-b9ed-38cb1ad77596" path="/var/lib/kubelet/pods/83cb2af8-6b26-4e89-b9ed-38cb1ad77596/volumes" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.865516 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.876472 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:56 crc kubenswrapper[5002]: E1203 18:10:56.877145 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-log" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.877171 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-log" Dec 03 18:10:56 crc kubenswrapper[5002]: E1203 18:10:56.877200 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af217565-3928-470d-a546-1a2706a76ad8" containerName="init" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.877210 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="af217565-3928-470d-a546-1a2706a76ad8" containerName="init" Dec 03 18:10:56 crc kubenswrapper[5002]: E1203 18:10:56.877231 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af217565-3928-470d-a546-1a2706a76ad8" containerName="dnsmasq-dns" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.877240 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="af217565-3928-470d-a546-1a2706a76ad8" containerName="dnsmasq-dns" Dec 03 18:10:56 crc kubenswrapper[5002]: E1203 18:10:56.877260 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-metadata" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 
18:10:56.877268 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-metadata" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.877487 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="af217565-3928-470d-a546-1a2706a76ad8" containerName="dnsmasq-dns" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.877506 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-log" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.877537 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" containerName="nova-metadata-metadata" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.878969 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.882622 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.883440 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.891652 5002 scope.go:117] "RemoveContainer" containerID="d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f" Dec 03 18:10:56 crc kubenswrapper[5002]: E1203 18:10:56.892071 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f\": container with ID starting with d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f not found: ID does not exist" containerID="d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892108 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f"} err="failed to get container status \"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f\": rpc error: code = NotFound desc = could not find container \"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f\": container with ID starting with d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f not found: ID does not exist" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892141 5002 scope.go:117] "RemoveContainer" containerID="a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5" Dec 03 18:10:56 crc kubenswrapper[5002]: E1203 18:10:56.892416 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5\": container with ID starting with a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5 not found: ID does not exist" containerID="a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892431 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5"} err="failed to get container status \"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5\": rpc error: code = NotFound desc = could not find 
container \"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5\": container with ID starting with a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5 not found: ID does not exist" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892443 5002 scope.go:117] "RemoveContainer" containerID="d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892679 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f"} err="failed to get container status \"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f\": rpc error: code = NotFound desc = could not find container \"d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f\": container with ID starting with d84df1e4f3f897635dccec22acb650a0b37d8e95673ea7f796b26fe4b43bb58f not found: ID does not exist" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892692 5002 scope.go:117] "RemoveContainer" containerID="a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.892875 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5"} err="failed to get container status \"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5\": rpc error: code = NotFound desc = could not find container \"a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5\": container with ID starting with a763f9b5438cb4dc0479dcd1a1d4be4fca8a18ad51515de16f55a78e003fefe5 not found: ID does not exist" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.894531 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923463 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-nb\") pod \"af217565-3928-470d-a546-1a2706a76ad8\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923604 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdcdl\" (UniqueName: \"kubernetes.io/projected/af217565-3928-470d-a546-1a2706a76ad8-kube-api-access-hdcdl\") pod \"af217565-3928-470d-a546-1a2706a76ad8\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923638 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-sb\") pod \"af217565-3928-470d-a546-1a2706a76ad8\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923661 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-dns-svc\") pod \"af217565-3928-470d-a546-1a2706a76ad8\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923682 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-config\") pod \"af217565-3928-470d-a546-1a2706a76ad8\" (UID: \"af217565-3928-470d-a546-1a2706a76ad8\") " Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923942 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9497e226-713c-4542-b225-9effa5a467e5-logs\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.923988 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.924033 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdvgn\" (UniqueName: \"kubernetes.io/projected/9497e226-713c-4542-b225-9effa5a467e5-kube-api-access-vdvgn\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.924057 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.924097 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-config-data\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.927688 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af217565-3928-470d-a546-1a2706a76ad8-kube-api-access-hdcdl" (OuterVolumeSpecName: "kube-api-access-hdcdl") pod "af217565-3928-470d-a546-1a2706a76ad8" (UID: "af217565-3928-470d-a546-1a2706a76ad8"). InnerVolumeSpecName "kube-api-access-hdcdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.987180 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "af217565-3928-470d-a546-1a2706a76ad8" (UID: "af217565-3928-470d-a546-1a2706a76ad8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.992864 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "af217565-3928-470d-a546-1a2706a76ad8" (UID: "af217565-3928-470d-a546-1a2706a76ad8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:10:56 crc kubenswrapper[5002]: I1203 18:10:56.993790 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-config" (OuterVolumeSpecName: "config") pod "af217565-3928-470d-a546-1a2706a76ad8" (UID: "af217565-3928-470d-a546-1a2706a76ad8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.009967 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "af217565-3928-470d-a546-1a2706a76ad8" (UID: "af217565-3928-470d-a546-1a2706a76ad8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.025884 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-config-data\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026073 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9497e226-713c-4542-b225-9effa5a467e5-logs\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026132 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026186 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdvgn\" (UniqueName: \"kubernetes.io/projected/9497e226-713c-4542-b225-9effa5a467e5-kube-api-access-vdvgn\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026219 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026307 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026322 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdcdl\" (UniqueName: \"kubernetes.io/projected/af217565-3928-470d-a546-1a2706a76ad8-kube-api-access-hdcdl\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026336 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:57 crc 
kubenswrapper[5002]: I1203 18:10:57.026348 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.026359 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af217565-3928-470d-a546-1a2706a76ad8-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.027358 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9497e226-713c-4542-b225-9effa5a467e5-logs\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.030425 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.030822 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.031731 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-config-data\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.043144 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdvgn\" (UniqueName: \"kubernetes.io/projected/9497e226-713c-4542-b225-9effa5a467e5-kube-api-access-vdvgn\") pod \"nova-metadata-0\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.090919 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.224602 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.664268 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.790975 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" event={"ID":"af217565-3928-470d-a546-1a2706a76ad8","Type":"ContainerDied","Data":"1393831c82513f7667d930f4b4d8912ac855a6dd580984ab427284361f2c15de"} Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.791023 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79966dc9f5-bw22t" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.791056 5002 scope.go:117] "RemoveContainer" containerID="60c2fb83848f3651194be7012820b2fdbdfbce4265a073c8d292082cb7df9bd6" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.819384 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"99c190da-fa41-41c0-a9dd-2e795133f7b6","Type":"ContainerStarted","Data":"fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4"} Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.819443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"99c190da-fa41-41c0-a9dd-2e795133f7b6","Type":"ContainerStarted","Data":"c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce"} Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.819457 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"99c190da-fa41-41c0-a9dd-2e795133f7b6","Type":"ContainerStarted","Data":"c197d3b54d44ecaacc24a6d989e52ae1d8111182d71e64d29a7ef91a7365ef58"} Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.822698 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9497e226-713c-4542-b225-9effa5a467e5","Type":"ContainerStarted","Data":"53d7d449ba48762bb298bfbfb91f38dc848c0f12a060cefdaca2602cd2b2a16d"} Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.835814 5002 scope.go:117] "RemoveContainer" containerID="ea763ba905ec0796392ccda4de8153f547f8ea7ddf297d0960e5c69b692d2a64" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.842921 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.842902915 podStartE2EDuration="2.842902915s" podCreationTimestamp="2025-12-03 18:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:57.840926202 +0000 UTC m=+5981.254748090" watchObservedRunningTime="2025-12-03 18:10:57.842902915 +0000 UTC m=+5981.256724803" Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.873951 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79966dc9f5-bw22t"] Dec 03 18:10:57 crc kubenswrapper[5002]: I1203 18:10:57.883778 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79966dc9f5-bw22t"] Dec 03 18:10:58 crc kubenswrapper[5002]: I1203 18:10:58.835900 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9497e226-713c-4542-b225-9effa5a467e5","Type":"ContainerStarted","Data":"2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6"} Dec 03 18:10:58 crc kubenswrapper[5002]: I1203 18:10:58.836259 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9497e226-713c-4542-b225-9effa5a467e5","Type":"ContainerStarted","Data":"d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d"} Dec 03 18:10:58 crc kubenswrapper[5002]: I1203 18:10:58.863657 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41e7f4a9-9a5c-495d-bee8-775645ce1603" path="/var/lib/kubelet/pods/41e7f4a9-9a5c-495d-bee8-775645ce1603/volumes" Dec 03 18:10:58 crc kubenswrapper[5002]: I1203 18:10:58.865493 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af217565-3928-470d-a546-1a2706a76ad8" 
path="/var/lib/kubelet/pods/af217565-3928-470d-a546-1a2706a76ad8/volumes" Dec 03 18:10:58 crc kubenswrapper[5002]: I1203 18:10:58.872014 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8719913139999997 podStartE2EDuration="2.871991314s" podCreationTimestamp="2025-12-03 18:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:10:58.862662023 +0000 UTC m=+5982.276483911" watchObservedRunningTime="2025-12-03 18:10:58.871991314 +0000 UTC m=+5982.285813242" Dec 03 18:10:59 crc kubenswrapper[5002]: I1203 18:10:59.107987 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.090891 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.108883 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.201337 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.201698 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.225419 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.225504 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.247728 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.900507 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 03 18:11:02 crc kubenswrapper[5002]: I1203 18:11:02.933413 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.026775 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdgvz"] Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.085054 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-v9xv5"] Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.086414 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.089864 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.090620 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.094363 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-v9xv5"] Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.150880 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.150995 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-config-data\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.151058 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph5w5\" (UniqueName: \"kubernetes.io/projected/5cbade77-df8d-48c2-883c-deeec7b8f6ea-kube-api-access-ph5w5\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.151078 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-scripts\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.251921 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-config-data\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.252009 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph5w5\" (UniqueName: \"kubernetes.io/projected/5cbade77-df8d-48c2-883c-deeec7b8f6ea-kube-api-access-ph5w5\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.252035 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-scripts\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.252107 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.257518 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-config-data\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.258259 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-scripts\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.265688 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.266611 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph5w5\" (UniqueName: \"kubernetes.io/projected/5cbade77-df8d-48c2-883c-deeec7b8f6ea-kube-api-access-ph5w5\") pod \"nova-cell1-cell-mapping-v9xv5\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.410937 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.857462 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-v9xv5"] Dec 03 18:11:03 crc kubenswrapper[5002]: I1203 18:11:03.894861 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v9xv5" event={"ID":"5cbade77-df8d-48c2-883c-deeec7b8f6ea","Type":"ContainerStarted","Data":"28b252d99e155b85e3ee4afce223bfc89a1adfba13ab3b96a78f9d8f0d714a31"} Dec 03 18:11:04 crc kubenswrapper[5002]: I1203 18:11:04.914645 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v9xv5" event={"ID":"5cbade77-df8d-48c2-883c-deeec7b8f6ea","Type":"ContainerStarted","Data":"90cc013c7795196536344e68fee35635bd5857d6ea0605b1a6e5adf6022eaf41"} Dec 03 18:11:04 crc kubenswrapper[5002]: I1203 18:11:04.914989 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vdgvz" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="registry-server" containerID="cri-o://4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb" gracePeriod=2 Dec 03 18:11:04 crc kubenswrapper[5002]: I1203 18:11:04.958462 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-v9xv5" podStartSLOduration=1.958434016 podStartE2EDuration="1.958434016s" podCreationTimestamp="2025-12-03 18:11:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:11:04.951098429 +0000 UTC m=+5988.364920337" watchObservedRunningTime="2025-12-03 18:11:04.958434016 +0000 UTC m=+5988.372255944" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.429535 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.596660 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-catalog-content\") pod \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.597009 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frjcp\" (UniqueName: \"kubernetes.io/projected/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-kube-api-access-frjcp\") pod \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.597100 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-utilities\") pod \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\" (UID: \"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275\") " Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.597987 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-utilities" (OuterVolumeSpecName: "utilities") pod "9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" (UID: "9e9b10fe-923e-48fd-85e3-2b1e4e8e1275"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.611152 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" (UID: "9e9b10fe-923e-48fd-85e3-2b1e4e8e1275"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.611970 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-kube-api-access-frjcp" (OuterVolumeSpecName: "kube-api-access-frjcp") pod "9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" (UID: "9e9b10fe-923e-48fd-85e3-2b1e4e8e1275"). InnerVolumeSpecName "kube-api-access-frjcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.698969 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.699002 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frjcp\" (UniqueName: \"kubernetes.io/projected/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-kube-api-access-frjcp\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.699013 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.934406 5002 generic.go:334] "Generic (PLEG): container finished" podID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerID="4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb" exitCode=0 Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.934461 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdgvz" event={"ID":"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275","Type":"ContainerDied","Data":"4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb"} Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.934495 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdgvz" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.934530 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdgvz" event={"ID":"9e9b10fe-923e-48fd-85e3-2b1e4e8e1275","Type":"ContainerDied","Data":"9ec5292f5b65e16fe89252394d075f65bd902c4c9e05eeb503ddebd954515b7d"} Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.934556 5002 scope.go:117] "RemoveContainer" containerID="4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.987386 5002 scope.go:117] "RemoveContainer" containerID="8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b" Dec 03 18:11:05 crc kubenswrapper[5002]: I1203 18:11:05.994364 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdgvz"] Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.016942 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdgvz"] Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.024974 5002 scope.go:117] "RemoveContainer" containerID="739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.073580 5002 scope.go:117] "RemoveContainer" containerID="4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb" Dec 03 18:11:06 crc kubenswrapper[5002]: E1203 18:11:06.074236 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb\": container with ID starting with 4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb not found: ID does not exist" containerID="4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.074289 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb"} err="failed to get container status \"4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb\": rpc error: code = NotFound desc = could not find container \"4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb\": container with ID starting with 4f8a47cb7d49f328877a3df5de10729827b96bfadb313716d2a50396e6dd4beb not found: ID does not exist" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.074323 5002 scope.go:117] "RemoveContainer" containerID="8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b" Dec 03 18:11:06 crc kubenswrapper[5002]: E1203 18:11:06.074940 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b\": container with ID starting with 8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b not found: ID does not exist" containerID="8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.074970 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b"} err="failed to get container status \"8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b\": rpc error: code = NotFound desc = could not find 
container \"8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b\": container with ID starting with 8d5ecf9049c02661d8cc86b509a02adb9eb9d94dfae3dd4151ad3a98da517f8b not found: ID does not exist" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.074993 5002 scope.go:117] "RemoveContainer" containerID="739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91" Dec 03 18:11:06 crc kubenswrapper[5002]: E1203 18:11:06.075260 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91\": container with ID starting with 739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91 not found: ID does not exist" containerID="739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.075289 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91"} err="failed to get container status \"739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91\": rpc error: code = NotFound desc = could not find container \"739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91\": container with ID starting with 739fda6d8cc2c2b6f2d00e4484b29065eca72cf45c1d6a75b01be1cdcbf1ef91 not found: ID does not exist" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.173338 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.173399 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 18:11:06 crc kubenswrapper[5002]: I1203 18:11:06.860337 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" path="/var/lib/kubelet/pods/9e9b10fe-923e-48fd-85e3-2b1e4e8e1275/volumes" Dec 03 18:11:07 crc kubenswrapper[5002]: I1203 18:11:07.225258 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 18:11:07 crc kubenswrapper[5002]: I1203 18:11:07.225336 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 18:11:07 crc kubenswrapper[5002]: I1203 18:11:07.258788 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.84:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:07 crc kubenswrapper[5002]: I1203 18:11:07.258802 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.84:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:08 crc kubenswrapper[5002]: I1203 18:11:08.240035 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.85:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:08 crc kubenswrapper[5002]: I1203 18:11:08.240579 5002 prober.go:107] "Probe 
failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.85:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:08 crc kubenswrapper[5002]: I1203 18:11:08.966698 5002 generic.go:334] "Generic (PLEG): container finished" podID="5cbade77-df8d-48c2-883c-deeec7b8f6ea" containerID="90cc013c7795196536344e68fee35635bd5857d6ea0605b1a6e5adf6022eaf41" exitCode=0 Dec 03 18:11:08 crc kubenswrapper[5002]: I1203 18:11:08.966744 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v9xv5" event={"ID":"5cbade77-df8d-48c2-883c-deeec7b8f6ea","Type":"ContainerDied","Data":"90cc013c7795196536344e68fee35635bd5857d6ea0605b1a6e5adf6022eaf41"} Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.336192 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.495265 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-combined-ca-bundle\") pod \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.495354 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-scripts\") pod \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.495528 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-config-data\") pod \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.495554 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph5w5\" (UniqueName: \"kubernetes.io/projected/5cbade77-df8d-48c2-883c-deeec7b8f6ea-kube-api-access-ph5w5\") pod \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\" (UID: \"5cbade77-df8d-48c2-883c-deeec7b8f6ea\") " Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.501781 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-scripts" (OuterVolumeSpecName: "scripts") pod "5cbade77-df8d-48c2-883c-deeec7b8f6ea" (UID: "5cbade77-df8d-48c2-883c-deeec7b8f6ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.503274 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cbade77-df8d-48c2-883c-deeec7b8f6ea-kube-api-access-ph5w5" (OuterVolumeSpecName: "kube-api-access-ph5w5") pod "5cbade77-df8d-48c2-883c-deeec7b8f6ea" (UID: "5cbade77-df8d-48c2-883c-deeec7b8f6ea"). InnerVolumeSpecName "kube-api-access-ph5w5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.531111 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-config-data" (OuterVolumeSpecName: "config-data") pod "5cbade77-df8d-48c2-883c-deeec7b8f6ea" (UID: "5cbade77-df8d-48c2-883c-deeec7b8f6ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.545302 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cbade77-df8d-48c2-883c-deeec7b8f6ea" (UID: "5cbade77-df8d-48c2-883c-deeec7b8f6ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.598379 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.598410 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.598418 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbade77-df8d-48c2-883c-deeec7b8f6ea-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.598426 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph5w5\" (UniqueName: \"kubernetes.io/projected/5cbade77-df8d-48c2-883c-deeec7b8f6ea-kube-api-access-ph5w5\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.990971 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v9xv5" event={"ID":"5cbade77-df8d-48c2-883c-deeec7b8f6ea","Type":"ContainerDied","Data":"28b252d99e155b85e3ee4afce223bfc89a1adfba13ab3b96a78f9d8f0d714a31"} Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.991034 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28b252d99e155b85e3ee4afce223bfc89a1adfba13ab3b96a78f9d8f0d714a31" Dec 03 18:11:10 crc kubenswrapper[5002]: I1203 18:11:10.991080 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v9xv5" Dec 03 18:11:11 crc kubenswrapper[5002]: I1203 18:11:11.187546 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:11 crc kubenswrapper[5002]: I1203 18:11:11.187820 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-log" containerID="cri-o://c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce" gracePeriod=30 Dec 03 18:11:11 crc kubenswrapper[5002]: I1203 18:11:11.188053 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-api" containerID="cri-o://fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4" gracePeriod=30 Dec 03 18:11:11 crc kubenswrapper[5002]: I1203 18:11:11.216422 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:11:11 crc kubenswrapper[5002]: I1203 18:11:11.216859 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-log" containerID="cri-o://d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d" gracePeriod=30 Dec 03 18:11:11 crc kubenswrapper[5002]: I1203 18:11:11.217791 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-metadata" containerID="cri-o://2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6" gracePeriod=30 Dec 03 18:11:12 crc kubenswrapper[5002]: I1203 18:11:12.003847 5002 generic.go:334] "Generic (PLEG): container finished" podID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerID="c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce" exitCode=143 Dec 03 18:11:12 crc kubenswrapper[5002]: I1203 18:11:12.003924 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"99c190da-fa41-41c0-a9dd-2e795133f7b6","Type":"ContainerDied","Data":"c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce"} Dec 03 18:11:12 crc kubenswrapper[5002]: I1203 18:11:12.006910 5002 generic.go:334] "Generic (PLEG): container finished" podID="9497e226-713c-4542-b225-9effa5a467e5" containerID="d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d" exitCode=143 Dec 03 18:11:12 crc kubenswrapper[5002]: I1203 18:11:12.006955 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9497e226-713c-4542-b225-9effa5a467e5","Type":"ContainerDied","Data":"d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d"} Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.830115 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.991563 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9497e226-713c-4542-b225-9effa5a467e5-logs\") pod \"9497e226-713c-4542-b225-9effa5a467e5\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.991641 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-combined-ca-bundle\") pod \"9497e226-713c-4542-b225-9effa5a467e5\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.991724 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdvgn\" (UniqueName: \"kubernetes.io/projected/9497e226-713c-4542-b225-9effa5a467e5-kube-api-access-vdvgn\") pod \"9497e226-713c-4542-b225-9effa5a467e5\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.991901 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-nova-metadata-tls-certs\") pod \"9497e226-713c-4542-b225-9effa5a467e5\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.991966 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-config-data\") pod \"9497e226-713c-4542-b225-9effa5a467e5\" (UID: \"9497e226-713c-4542-b225-9effa5a467e5\") " Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.993011 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9497e226-713c-4542-b225-9effa5a467e5-logs" (OuterVolumeSpecName: "logs") pod "9497e226-713c-4542-b225-9effa5a467e5" (UID: "9497e226-713c-4542-b225-9effa5a467e5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:11:14 crc kubenswrapper[5002]: I1203 18:11:14.998826 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9497e226-713c-4542-b225-9effa5a467e5-kube-api-access-vdvgn" (OuterVolumeSpecName: "kube-api-access-vdvgn") pod "9497e226-713c-4542-b225-9effa5a467e5" (UID: "9497e226-713c-4542-b225-9effa5a467e5"). InnerVolumeSpecName "kube-api-access-vdvgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.027104 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9497e226-713c-4542-b225-9effa5a467e5" (UID: "9497e226-713c-4542-b225-9effa5a467e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.029935 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-config-data" (OuterVolumeSpecName: "config-data") pod "9497e226-713c-4542-b225-9effa5a467e5" (UID: "9497e226-713c-4542-b225-9effa5a467e5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.039733 5002 generic.go:334] "Generic (PLEG): container finished" podID="9497e226-713c-4542-b225-9effa5a467e5" containerID="2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6" exitCode=0 Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.039909 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.039868 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9497e226-713c-4542-b225-9effa5a467e5","Type":"ContainerDied","Data":"2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6"} Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.040270 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9497e226-713c-4542-b225-9effa5a467e5","Type":"ContainerDied","Data":"53d7d449ba48762bb298bfbfb91f38dc848c0f12a060cefdaca2602cd2b2a16d"} Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.040339 5002 scope.go:117] "RemoveContainer" containerID="2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.061592 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "9497e226-713c-4542-b225-9effa5a467e5" (UID: "9497e226-713c-4542-b225-9effa5a467e5"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.095431 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9497e226-713c-4542-b225-9effa5a467e5-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.095463 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.095475 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdvgn\" (UniqueName: \"kubernetes.io/projected/9497e226-713c-4542-b225-9effa5a467e5-kube-api-access-vdvgn\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.095485 5002 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.095494 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9497e226-713c-4542-b225-9effa5a467e5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.121390 5002 scope.go:117] "RemoveContainer" containerID="d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.143998 5002 scope.go:117] "RemoveContainer" containerID="2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.144920 5002 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6\": container with ID starting with 2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6 not found: ID does not exist" containerID="2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.144998 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6"} err="failed to get container status \"2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6\": rpc error: code = NotFound desc = could not find container \"2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6\": container with ID starting with 2ba3bf658d1d8fa0ee7af534294d9854c378b50b8cd0a7211a2d9cde7aaff6b6 not found: ID does not exist" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.145059 5002 scope.go:117] "RemoveContainer" containerID="d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.145697 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d\": container with ID starting with d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d not found: ID does not exist" containerID="d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.145731 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d"} err="failed to get container status \"d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d\": rpc error: code = NotFound desc = could not find container \"d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d\": container with ID starting with d0fe66420e6968558baa7440bd96f2ed4fbce4ff050ac5745a8fbf2eafce347d not found: ID does not exist" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.394044 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.413944 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.424665 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.425179 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="registry-server" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425201 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="registry-server" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.425222 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="extract-content" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425230 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="extract-content" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.425250 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-metadata" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425260 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-metadata" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.425270 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cbade77-df8d-48c2-883c-deeec7b8f6ea" containerName="nova-manage" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425278 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cbade77-df8d-48c2-883c-deeec7b8f6ea" containerName="nova-manage" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.425298 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="extract-utilities" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425306 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="extract-utilities" Dec 03 18:11:15 crc kubenswrapper[5002]: E1203 18:11:15.425331 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-log" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425338 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-log" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425568 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-metadata" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425588 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cbade77-df8d-48c2-883c-deeec7b8f6ea" containerName="nova-manage" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425607 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9497e226-713c-4542-b225-9effa5a467e5" containerName="nova-metadata-log" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.425622 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e9b10fe-923e-48fd-85e3-2b1e4e8e1275" containerName="registry-server" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.426929 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.434083 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.434857 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.437447 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.502564 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54ec48d2-46c8-4162-93a5-50355a8637a9-logs\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.502664 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.502831 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.502862 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf9w9\" (UniqueName: \"kubernetes.io/projected/54ec48d2-46c8-4162-93a5-50355a8637a9-kube-api-access-vf9w9\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.503957 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-config-data\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.608607 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-config-data\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.608806 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54ec48d2-46c8-4162-93a5-50355a8637a9-logs\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.608856 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 
18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.608996 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.609050 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf9w9\" (UniqueName: \"kubernetes.io/projected/54ec48d2-46c8-4162-93a5-50355a8637a9-kube-api-access-vf9w9\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.609321 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54ec48d2-46c8-4162-93a5-50355a8637a9-logs\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.613393 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.613810 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.614212 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ec48d2-46c8-4162-93a5-50355a8637a9-config-data\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.630323 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf9w9\" (UniqueName: \"kubernetes.io/projected/54ec48d2-46c8-4162-93a5-50355a8637a9-kube-api-access-vf9w9\") pod \"nova-metadata-0\" (UID: \"54ec48d2-46c8-4162-93a5-50355a8637a9\") " pod="openstack/nova-metadata-0" Dec 03 18:11:15 crc kubenswrapper[5002]: I1203 18:11:15.764252 5002 util.go:30] "No sandbox for pod can be found. 
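The VerifyControllerAttachedVolume -> MountVolume -> MountVolume.SetUp progression above is the kubelet's volume reconciler converging actual state toward desired state. A toy sketch of that control-loop shape, with illustrative types that are not the kubelet's own:

package main

import "fmt"

type volume struct{ name string }

// reconcile mounts every desired volume not yet present in the actual
// state: the same converge-toward-desired pattern the log lines above
// show for config-data, logs, and the TLS secret volumes.
func reconcile(desired []volume, actual map[string]bool, mount func(volume) error) {
    for _, v := range desired {
        if actual[v.name] {
            continue // already mounted, nothing to do
        }
        if err := mount(v); err != nil {
            fmt.Println("MountVolume failed, will retry:", v.name, err)
            continue
        }
        actual[v.name] = true
        fmt.Println("MountVolume.SetUp succeeded for volume", v.name)
    }
}

func main() {
    desired := []volume{{"config-data"}, {"logs"}, {"nova-metadata-tls-certs"}}
    actual := map[string]bool{"logs": true} // empty-dir already set up
    reconcile(desired, actual, func(v volume) error { return nil })
}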
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 18:11:16 crc kubenswrapper[5002]: I1203 18:11:16.243161 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 18:11:16 crc kubenswrapper[5002]: I1203 18:11:16.852146 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9497e226-713c-4542-b225-9effa5a467e5" path="/var/lib/kubelet/pods/9497e226-713c-4542-b225-9effa5a467e5/volumes" Dec 03 18:11:17 crc kubenswrapper[5002]: I1203 18:11:17.060160 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54ec48d2-46c8-4162-93a5-50355a8637a9","Type":"ContainerStarted","Data":"f3012034d860a5247104c1987d958a954b1197028319bde897d78df43d7001c1"} Dec 03 18:11:17 crc kubenswrapper[5002]: I1203 18:11:17.060217 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54ec48d2-46c8-4162-93a5-50355a8637a9","Type":"ContainerStarted","Data":"49278de0037b1efb77722df0a7c9594a4af9e436575c417ac99cb4512edb516d"} Dec 03 18:11:17 crc kubenswrapper[5002]: I1203 18:11:17.060230 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54ec48d2-46c8-4162-93a5-50355a8637a9","Type":"ContainerStarted","Data":"1fe8083d88cb7676c66b50b56212aa9966c78eae05301a6755669483ff6f4651"} Dec 03 18:11:17 crc kubenswrapper[5002]: I1203 18:11:17.086806 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.086787241 podStartE2EDuration="2.086787241s" podCreationTimestamp="2025-12-03 18:11:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:11:17.08117399 +0000 UTC m=+6000.494995878" watchObservedRunningTime="2025-12-03 18:11:17.086787241 +0000 UTC m=+6000.500609139" Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.764443 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.765199 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.916326 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.916382 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.916428 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.917244 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 18:11:20 crc kubenswrapper[5002]: I1203 18:11:20.917312 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" gracePeriod=600 Dec 03 18:11:21 crc kubenswrapper[5002]: E1203 18:11:21.050792 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:11:21 crc kubenswrapper[5002]: I1203 18:11:21.107359 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" exitCode=0 Dec 03 18:11:21 crc kubenswrapper[5002]: I1203 18:11:21.107404 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"} Dec 03 18:11:21 crc kubenswrapper[5002]: I1203 18:11:21.107473 5002 scope.go:117] "RemoveContainer" containerID="f5d0d4d14dd7dac7b94bb6ad5c76c5a78375d0dbd75748edfcbc430636410612" Dec 03 18:11:21 crc kubenswrapper[5002]: I1203 18:11:21.108180 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:11:21 crc kubenswrapper[5002]: E1203 18:11:21.108475 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.132001 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.166313 5002 generic.go:334] "Generic (PLEG): container finished" podID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerID="fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4" exitCode=0 Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.166404 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"99c190da-fa41-41c0-a9dd-2e795133f7b6","Type":"ContainerDied","Data":"fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4"} Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.166412 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.166439 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"99c190da-fa41-41c0-a9dd-2e795133f7b6","Type":"ContainerDied","Data":"c197d3b54d44ecaacc24a6d989e52ae1d8111182d71e64d29a7ef91a7365ef58"} Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.166461 5002 scope.go:117] "RemoveContainer" containerID="fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.172660 5002 generic.go:334] "Generic (PLEG): container finished" podID="11897236-6398-4f41-a242-45dc4af738d0" containerID="638964fc94eda4af2ea8e4b3cf7875950f0ff1a90288f7221d0b57ec7184eb57" exitCode=137 Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.172716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"11897236-6398-4f41-a242-45dc4af738d0","Type":"ContainerDied","Data":"638964fc94eda4af2ea8e4b3cf7875950f0ff1a90288f7221d0b57ec7184eb57"} Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.197930 5002 scope.go:117] "RemoveContainer" containerID="c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.222108 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-combined-ca-bundle\") pod \"99c190da-fa41-41c0-a9dd-2e795133f7b6\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.222305 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99c190da-fa41-41c0-a9dd-2e795133f7b6-logs\") pod \"99c190da-fa41-41c0-a9dd-2e795133f7b6\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.222408 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-config-data\") pod \"99c190da-fa41-41c0-a9dd-2e795133f7b6\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.222453 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqf67\" (UniqueName: \"kubernetes.io/projected/99c190da-fa41-41c0-a9dd-2e795133f7b6-kube-api-access-nqf67\") pod \"99c190da-fa41-41c0-a9dd-2e795133f7b6\" (UID: \"99c190da-fa41-41c0-a9dd-2e795133f7b6\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.223357 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99c190da-fa41-41c0-a9dd-2e795133f7b6-logs" (OuterVolumeSpecName: "logs") pod "99c190da-fa41-41c0-a9dd-2e795133f7b6" (UID: "99c190da-fa41-41c0-a9dd-2e795133f7b6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.242048 5002 scope.go:117] "RemoveContainer" containerID="fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4" Dec 03 18:11:25 crc kubenswrapper[5002]: E1203 18:11:25.242862 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4\": container with ID starting with fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4 not found: ID does not exist" containerID="fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.242943 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4"} err="failed to get container status \"fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4\": rpc error: code = NotFound desc = could not find container \"fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4\": container with ID starting with fef8b41a647b961fd446c15107f4ea0e88d46b3bf5c32809d144dfd79ff0fbe4 not found: ID does not exist" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.242977 5002 scope.go:117] "RemoveContainer" containerID="c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce" Dec 03 18:11:25 crc kubenswrapper[5002]: E1203 18:11:25.243471 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce\": container with ID starting with c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce not found: ID does not exist" containerID="c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.243517 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce"} err="failed to get container status \"c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce\": rpc error: code = NotFound desc = could not find container \"c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce\": container with ID starting with c1bb04338d219d45fea59714aecd215629d011ea64f57b65a163a1f021a4b6ce not found: ID does not exist" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.247516 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99c190da-fa41-41c0-a9dd-2e795133f7b6-kube-api-access-nqf67" (OuterVolumeSpecName: "kube-api-access-nqf67") pod "99c190da-fa41-41c0-a9dd-2e795133f7b6" (UID: "99c190da-fa41-41c0-a9dd-2e795133f7b6"). InnerVolumeSpecName "kube-api-access-nqf67". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.254114 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99c190da-fa41-41c0-a9dd-2e795133f7b6" (UID: "99c190da-fa41-41c0-a9dd-2e795133f7b6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.258629 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-config-data" (OuterVolumeSpecName: "config-data") pod "99c190da-fa41-41c0-a9dd-2e795133f7b6" (UID: "99c190da-fa41-41c0-a9dd-2e795133f7b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.291605 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.324303 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.324335 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99c190da-fa41-41c0-a9dd-2e795133f7b6-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.324345 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99c190da-fa41-41c0-a9dd-2e795133f7b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.324355 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqf67\" (UniqueName: \"kubernetes.io/projected/99c190da-fa41-41c0-a9dd-2e795133f7b6-kube-api-access-nqf67\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.426137 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-combined-ca-bundle\") pod \"11897236-6398-4f41-a242-45dc4af738d0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.426286 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpxh4\" (UniqueName: \"kubernetes.io/projected/11897236-6398-4f41-a242-45dc4af738d0-kube-api-access-dpxh4\") pod \"11897236-6398-4f41-a242-45dc4af738d0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.426540 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-config-data\") pod \"11897236-6398-4f41-a242-45dc4af738d0\" (UID: \"11897236-6398-4f41-a242-45dc4af738d0\") " Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.430795 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11897236-6398-4f41-a242-45dc4af738d0-kube-api-access-dpxh4" (OuterVolumeSpecName: "kube-api-access-dpxh4") pod "11897236-6398-4f41-a242-45dc4af738d0" (UID: "11897236-6398-4f41-a242-45dc4af738d0"). InnerVolumeSpecName "kube-api-access-dpxh4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.453298 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11897236-6398-4f41-a242-45dc4af738d0" (UID: "11897236-6398-4f41-a242-45dc4af738d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.455545 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-config-data" (OuterVolumeSpecName: "config-data") pod "11897236-6398-4f41-a242-45dc4af738d0" (UID: "11897236-6398-4f41-a242-45dc4af738d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.499817 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.511525 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.527977 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:25 crc kubenswrapper[5002]: E1203 18:11:25.528401 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-log" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528419 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-log" Dec 03 18:11:25 crc kubenswrapper[5002]: E1203 18:11:25.528432 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11897236-6398-4f41-a242-45dc4af738d0" containerName="nova-scheduler-scheduler" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528440 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="11897236-6398-4f41-a242-45dc4af738d0" containerName="nova-scheduler-scheduler" Dec 03 18:11:25 crc kubenswrapper[5002]: E1203 18:11:25.528458 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-api" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528463 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-api" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528634 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528685 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11897236-6398-4f41-a242-45dc4af738d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528700 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpxh4\" (UniqueName: \"kubernetes.io/projected/11897236-6398-4f41-a242-45dc4af738d0-kube-api-access-dpxh4\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528646 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="11897236-6398-4f41-a242-45dc4af738d0" 
containerName="nova-scheduler-scheduler" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528831 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-api" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.528885 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" containerName="nova-api-log" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.530288 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.532407 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.542687 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.629964 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66ltw\" (UniqueName: \"kubernetes.io/projected/394bff6c-5336-4f5e-a786-69a38c259f58-kube-api-access-66ltw\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.630005 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/394bff6c-5336-4f5e-a786-69a38c259f58-logs\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.630027 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.630062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-config-data\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.732115 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66ltw\" (UniqueName: \"kubernetes.io/projected/394bff6c-5336-4f5e-a786-69a38c259f58-kube-api-access-66ltw\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.732517 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/394bff6c-5336-4f5e-a786-69a38c259f58-logs\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.732555 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.732593 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-config-data\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.733011 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/394bff6c-5336-4f5e-a786-69a38c259f58-logs\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.744997 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.745054 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-config-data\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.749374 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66ltw\" (UniqueName: \"kubernetes.io/projected/394bff6c-5336-4f5e-a786-69a38c259f58-kube-api-access-66ltw\") pod \"nova-api-0\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " pod="openstack/nova-api-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.765191 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.765426 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 18:11:25 crc kubenswrapper[5002]: I1203 18:11:25.855481 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.187878 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.188638 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"11897236-6398-4f41-a242-45dc4af738d0","Type":"ContainerDied","Data":"48a1289c53dd19d3ee8e870aa932073a553eadf6f6ab3b0d1c27abc9d4e5fe1b"} Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.188700 5002 scope.go:117] "RemoveContainer" containerID="638964fc94eda4af2ea8e4b3cf7875950f0ff1a90288f7221d0b57ec7184eb57" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.250932 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.265446 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.277949 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.279885 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.282455 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.296939 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.332061 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.453316 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scpbw\" (UniqueName: \"kubernetes.io/projected/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-kube-api-access-scpbw\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.453421 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.453522 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-config-data\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.555105 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-config-data\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.555241 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scpbw\" (UniqueName: \"kubernetes.io/projected/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-kube-api-access-scpbw\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.555303 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.559888 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.561464 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-config-data\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 
18:11:26.582173 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scpbw\" (UniqueName: \"kubernetes.io/projected/bea47cf0-de73-4742-ae2b-8c344f3e0fb5-kube-api-access-scpbw\") pod \"nova-scheduler-0\" (UID: \"bea47cf0-de73-4742-ae2b-8c344f3e0fb5\") " pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.607671 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.781078 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="54ec48d2-46c8-4162-93a5-50355a8637a9" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.87:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.781938 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="54ec48d2-46c8-4162-93a5-50355a8637a9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.87:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.858366 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11897236-6398-4f41-a242-45dc4af738d0" path="/var/lib/kubelet/pods/11897236-6398-4f41-a242-45dc4af738d0/volumes" Dec 03 18:11:26 crc kubenswrapper[5002]: I1203 18:11:26.859463 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99c190da-fa41-41c0-a9dd-2e795133f7b6" path="/var/lib/kubelet/pods/99c190da-fa41-41c0-a9dd-2e795133f7b6/volumes" Dec 03 18:11:27 crc kubenswrapper[5002]: W1203 18:11:27.101814 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbea47cf0_de73_4742_ae2b_8c344f3e0fb5.slice/crio-340c0045a34078ba8d5c4c775569dd0cbc2e6865cef0e69c98839037ad2bc149 WatchSource:0}: Error finding container 340c0045a34078ba8d5c4c775569dd0cbc2e6865cef0e69c98839037ad2bc149: Status 404 returned error can't find the container with id 340c0045a34078ba8d5c4c775569dd0cbc2e6865cef0e69c98839037ad2bc149 Dec 03 18:11:27 crc kubenswrapper[5002]: I1203 18:11:27.101857 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 18:11:27 crc kubenswrapper[5002]: I1203 18:11:27.197642 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bea47cf0-de73-4742-ae2b-8c344f3e0fb5","Type":"ContainerStarted","Data":"340c0045a34078ba8d5c4c775569dd0cbc2e6865cef0e69c98839037ad2bc149"} Dec 03 18:11:27 crc kubenswrapper[5002]: I1203 18:11:27.201148 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"394bff6c-5336-4f5e-a786-69a38c259f58","Type":"ContainerStarted","Data":"7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b"} Dec 03 18:11:27 crc kubenswrapper[5002]: I1203 18:11:27.201171 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"394bff6c-5336-4f5e-a786-69a38c259f58","Type":"ContainerStarted","Data":"23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e"} Dec 03 18:11:27 crc kubenswrapper[5002]: I1203 18:11:27.201180 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
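The startup-probe failures above ("Client.Timeout exceeded while awaiting headers") are plain HTTP GETs that did not answer within the probe timeout. Assuming the kubelet's default 1s timeout, an equivalent ad-hoc check looks like this (the URL comes from the log; skipping TLS verification is a convenience of this sketch, since probes hit service certs by IP):

package main

import (
    "crypto/tls"
    "fmt"
    "net/http"
    "time"
)

// probe issues one GET the way the startup probes above do; a slow
// endpoint surfaces exactly as the Client.Timeout failures in the log.
func probe(url string) error {
    client := &http.Client{
        Timeout: 1 * time.Second, // kubelet default timeoutSeconds
        Transport: &http.Transport{
            TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
        },
    }
    resp, err := client.Get(url)
    if err != nil {
        return err // timeouts and connection refusals both land here
    }
    defer resp.Body.Close()
    if resp.StatusCode >= 400 {
        return fmt.Errorf("unhealthy: %s", resp.Status)
    }
    return nil
}

func main() {
    if err := probe("https://10.217.1.87:8775/"); err != nil {
        fmt.Println("Probe failed:", err)
    }
}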
event={"ID":"394bff6c-5336-4f5e-a786-69a38c259f58","Type":"ContainerStarted","Data":"1874a94fc89392d06dd873e750f12449dea31f3286ea158888b9de14aa3392c0"} Dec 03 18:11:27 crc kubenswrapper[5002]: I1203 18:11:27.217617 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.217602041 podStartE2EDuration="2.217602041s" podCreationTimestamp="2025-12-03 18:11:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:11:27.217204001 +0000 UTC m=+6010.631025909" watchObservedRunningTime="2025-12-03 18:11:27.217602041 +0000 UTC m=+6010.631423929" Dec 03 18:11:28 crc kubenswrapper[5002]: I1203 18:11:28.213684 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bea47cf0-de73-4742-ae2b-8c344f3e0fb5","Type":"ContainerStarted","Data":"8fe8d0a4c97db7007aea8e539deafe467d3361471ceb2efc9d11e1cc558ad696"} Dec 03 18:11:28 crc kubenswrapper[5002]: I1203 18:11:28.237871 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.237846962 podStartE2EDuration="2.237846962s" podCreationTimestamp="2025-12-03 18:11:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:11:28.235428917 +0000 UTC m=+6011.649250815" watchObservedRunningTime="2025-12-03 18:11:28.237846962 +0000 UTC m=+6011.651668870" Dec 03 18:11:31 crc kubenswrapper[5002]: I1203 18:11:31.608960 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.771893 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.772610 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.779176 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.779721 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.849336 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:11:35 crc kubenswrapper[5002]: E1203 18:11:35.849640 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.857340 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 18:11:35 crc kubenswrapper[5002]: I1203 18:11:35.857431 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 18:11:36 crc kubenswrapper[5002]: I1203 18:11:36.608593 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-scheduler-0" Dec 03 18:11:36 crc kubenswrapper[5002]: I1203 18:11:36.646296 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 18:11:36 crc kubenswrapper[5002]: I1203 18:11:36.939053 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.88:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:36 crc kubenswrapper[5002]: I1203 18:11:36.939154 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.88:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 18:11:37 crc kubenswrapper[5002]: I1203 18:11:37.334559 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 18:11:45 crc kubenswrapper[5002]: I1203 18:11:45.859229 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 18:11:45 crc kubenswrapper[5002]: I1203 18:11:45.860241 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 18:11:45 crc kubenswrapper[5002]: I1203 18:11:45.861060 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 18:11:45 crc kubenswrapper[5002]: I1203 18:11:45.864254 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.396209 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.400671 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.610287 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59dfd5fdc9-m86d8"] Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.612388 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.622161 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59dfd5fdc9-m86d8"] Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.684158 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-ovsdbserver-sb\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.684252 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-config\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.684306 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-ovsdbserver-nb\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.684486 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb92m\" (UniqueName: \"kubernetes.io/projected/8e7ff606-dfd3-4012-ad37-d96373c36ee8-kube-api-access-xb92m\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.684637 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-dns-svc\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.788806 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-dns-svc\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.788964 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-ovsdbserver-sb\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.789015 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-config\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.789050 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-ovsdbserver-nb\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.789079 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb92m\" (UniqueName: \"kubernetes.io/projected/8e7ff606-dfd3-4012-ad37-d96373c36ee8-kube-api-access-xb92m\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.789933 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-dns-svc\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.789935 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-config\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.790328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-ovsdbserver-sb\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.790328 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e7ff606-dfd3-4012-ad37-d96373c36ee8-ovsdbserver-nb\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.807529 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb92m\" (UniqueName: \"kubernetes.io/projected/8e7ff606-dfd3-4012-ad37-d96373c36ee8-kube-api-access-xb92m\") pod \"dnsmasq-dns-59dfd5fdc9-m86d8\" (UID: \"8e7ff606-dfd3-4012-ad37-d96373c36ee8\") " pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:46 crc kubenswrapper[5002]: I1203 18:11:46.938027 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:47 crc kubenswrapper[5002]: I1203 18:11:47.429708 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59dfd5fdc9-m86d8"] Dec 03 18:11:47 crc kubenswrapper[5002]: W1203 18:11:47.431209 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e7ff606_dfd3_4012_ad37_d96373c36ee8.slice/crio-49c553799447a830daacc02ebbd011cf014b1cdf815a9bb3b257f4e4de5ab2ba WatchSource:0}: Error finding container 49c553799447a830daacc02ebbd011cf014b1cdf815a9bb3b257f4e4de5ab2ba: Status 404 returned error can't find the container with id 49c553799447a830daacc02ebbd011cf014b1cdf815a9bb3b257f4e4de5ab2ba Dec 03 18:11:48 crc kubenswrapper[5002]: I1203 18:11:48.416539 5002 generic.go:334] "Generic (PLEG): container finished" podID="8e7ff606-dfd3-4012-ad37-d96373c36ee8" containerID="0e4211003dc34dfabaf6303fbba6ec733634f1b0c58860f011d02a69b234a06d" exitCode=0 Dec 03 18:11:48 crc kubenswrapper[5002]: I1203 18:11:48.416624 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" event={"ID":"8e7ff606-dfd3-4012-ad37-d96373c36ee8","Type":"ContainerDied","Data":"0e4211003dc34dfabaf6303fbba6ec733634f1b0c58860f011d02a69b234a06d"} Dec 03 18:11:48 crc kubenswrapper[5002]: I1203 18:11:48.417016 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" event={"ID":"8e7ff606-dfd3-4012-ad37-d96373c36ee8","Type":"ContainerStarted","Data":"49c553799447a830daacc02ebbd011cf014b1cdf815a9bb3b257f4e4de5ab2ba"} Dec 03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.091586 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.427933 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-log" containerID="cri-o://23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e" gracePeriod=30 Dec 03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.428425 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-api" containerID="cri-o://7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b" gracePeriod=30 Dec 03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.428594 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" event={"ID":"8e7ff606-dfd3-4012-ad37-d96373c36ee8","Type":"ContainerStarted","Data":"c02980cb022d333cfa8742f09d05a4480162938be2540fd44162dd139a340a7e"} Dec 03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.428880 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.461352 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" podStartSLOduration=3.461328361 podStartE2EDuration="3.461328361s" podCreationTimestamp="2025-12-03 18:11:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:11:49.451865266 +0000 UTC m=+6032.865687164" watchObservedRunningTime="2025-12-03 18:11:49.461328361 +0000 UTC m=+6032.875150249" Dec 
03 18:11:49 crc kubenswrapper[5002]: I1203 18:11:49.839849 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:11:49 crc kubenswrapper[5002]: E1203 18:11:49.840204 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:11:50 crc kubenswrapper[5002]: I1203 18:11:50.441283 5002 generic.go:334] "Generic (PLEG): container finished" podID="394bff6c-5336-4f5e-a786-69a38c259f58" containerID="23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e" exitCode=143 Dec 03 18:11:50 crc kubenswrapper[5002]: I1203 18:11:50.441341 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"394bff6c-5336-4f5e-a786-69a38c259f58","Type":"ContainerDied","Data":"23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e"} Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.000211 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.122263 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-combined-ca-bundle\") pod \"394bff6c-5336-4f5e-a786-69a38c259f58\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.122433 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-config-data\") pod \"394bff6c-5336-4f5e-a786-69a38c259f58\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.122538 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/394bff6c-5336-4f5e-a786-69a38c259f58-logs\") pod \"394bff6c-5336-4f5e-a786-69a38c259f58\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.122636 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66ltw\" (UniqueName: \"kubernetes.io/projected/394bff6c-5336-4f5e-a786-69a38c259f58-kube-api-access-66ltw\") pod \"394bff6c-5336-4f5e-a786-69a38c259f58\" (UID: \"394bff6c-5336-4f5e-a786-69a38c259f58\") " Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.123111 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/394bff6c-5336-4f5e-a786-69a38c259f58-logs" (OuterVolumeSpecName: "logs") pod "394bff6c-5336-4f5e-a786-69a38c259f58" (UID: "394bff6c-5336-4f5e-a786-69a38c259f58"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.129141 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/394bff6c-5336-4f5e-a786-69a38c259f58-kube-api-access-66ltw" (OuterVolumeSpecName: "kube-api-access-66ltw") pod "394bff6c-5336-4f5e-a786-69a38c259f58" (UID: "394bff6c-5336-4f5e-a786-69a38c259f58"). InnerVolumeSpecName "kube-api-access-66ltw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.154897 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-config-data" (OuterVolumeSpecName: "config-data") pod "394bff6c-5336-4f5e-a786-69a38c259f58" (UID: "394bff6c-5336-4f5e-a786-69a38c259f58"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.158792 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "394bff6c-5336-4f5e-a786-69a38c259f58" (UID: "394bff6c-5336-4f5e-a786-69a38c259f58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.224640 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66ltw\" (UniqueName: \"kubernetes.io/projected/394bff6c-5336-4f5e-a786-69a38c259f58-kube-api-access-66ltw\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.224886 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.224966 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/394bff6c-5336-4f5e-a786-69a38c259f58-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.225033 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/394bff6c-5336-4f5e-a786-69a38c259f58-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.466419 5002 generic.go:334] "Generic (PLEG): container finished" podID="394bff6c-5336-4f5e-a786-69a38c259f58" containerID="7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b" exitCode=0 Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.466469 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"394bff6c-5336-4f5e-a786-69a38c259f58","Type":"ContainerDied","Data":"7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b"} Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.466737 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"394bff6c-5336-4f5e-a786-69a38c259f58","Type":"ContainerDied","Data":"1874a94fc89392d06dd873e750f12449dea31f3286ea158888b9de14aa3392c0"} Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.466789 5002 scope.go:117] "RemoveContainer" containerID="7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.466497 5002 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.497066 5002 scope.go:117] "RemoveContainer" containerID="23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.515378 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.533857 5002 scope.go:117] "RemoveContainer" containerID="7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.536036 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:53 crc kubenswrapper[5002]: E1203 18:11:53.537563 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b\": container with ID starting with 7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b not found: ID does not exist" containerID="7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.537600 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b"} err="failed to get container status \"7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b\": rpc error: code = NotFound desc = could not find container \"7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b\": container with ID starting with 7b62b956a1b9738b48aef8e9ae4a0c08c62e303cca2f30ac29c94e67f5365b6b not found: ID does not exist" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.537634 5002 scope.go:117] "RemoveContainer" containerID="23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e" Dec 03 18:11:53 crc kubenswrapper[5002]: E1203 18:11:53.541099 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e\": container with ID starting with 23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e not found: ID does not exist" containerID="23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.541163 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e"} err="failed to get container status \"23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e\": rpc error: code = NotFound desc = could not find container \"23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e\": container with ID starting with 23e7075e80c0dd5fe34a52030e7a0d5c2980cd68efa31864381735d326569a9e not found: ID does not exist" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.549637 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:53 crc kubenswrapper[5002]: E1203 18:11:53.550229 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-api" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.550248 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" 
containerName="nova-api-api" Dec 03 18:11:53 crc kubenswrapper[5002]: E1203 18:11:53.550280 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-log" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.550289 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-log" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.550518 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-api" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.550549 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" containerName="nova-api-log" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.551790 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.554451 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.554945 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.555309 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.578412 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.631768 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkpdb\" (UniqueName: \"kubernetes.io/projected/db27d6a5-7d81-448f-9aa1-eff2c429c52c-kube-api-access-hkpdb\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.632133 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db27d6a5-7d81-448f-9aa1-eff2c429c52c-logs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.632245 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.632332 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-config-data\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.632438 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.632509 5002 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-public-tls-certs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.733940 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.733996 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-config-data\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.734065 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.734084 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-public-tls-certs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.734178 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkpdb\" (UniqueName: \"kubernetes.io/projected/db27d6a5-7d81-448f-9aa1-eff2c429c52c-kube-api-access-hkpdb\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.734248 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db27d6a5-7d81-448f-9aa1-eff2c429c52c-logs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.734623 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db27d6a5-7d81-448f-9aa1-eff2c429c52c-logs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.739177 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.739854 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-config-data\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.741659 5002 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.745604 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db27d6a5-7d81-448f-9aa1-eff2c429c52c-public-tls-certs\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.759173 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkpdb\" (UniqueName: \"kubernetes.io/projected/db27d6a5-7d81-448f-9aa1-eff2c429c52c-kube-api-access-hkpdb\") pod \"nova-api-0\" (UID: \"db27d6a5-7d81-448f-9aa1-eff2c429c52c\") " pod="openstack/nova-api-0" Dec 03 18:11:53 crc kubenswrapper[5002]: I1203 18:11:53.881361 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 18:11:54 crc kubenswrapper[5002]: I1203 18:11:54.352240 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 18:11:54 crc kubenswrapper[5002]: I1203 18:11:54.478063 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db27d6a5-7d81-448f-9aa1-eff2c429c52c","Type":"ContainerStarted","Data":"1acb9e55f1288e082422a8f7a672f5931d412f4cccc815408e673181f78adcac"} Dec 03 18:11:54 crc kubenswrapper[5002]: I1203 18:11:54.876645 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="394bff6c-5336-4f5e-a786-69a38c259f58" path="/var/lib/kubelet/pods/394bff6c-5336-4f5e-a786-69a38c259f58/volumes" Dec 03 18:11:55 crc kubenswrapper[5002]: I1203 18:11:55.492503 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db27d6a5-7d81-448f-9aa1-eff2c429c52c","Type":"ContainerStarted","Data":"3257624fe2a0aa5d49353ba7e719a2645daa6e5d02b04a9a00b63c870506d114"} Dec 03 18:11:55 crc kubenswrapper[5002]: I1203 18:11:55.492556 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db27d6a5-7d81-448f-9aa1-eff2c429c52c","Type":"ContainerStarted","Data":"8e9fbe9774f63f5b913b3878d039b0f0ad4dfabd34f70267381951bf3f9971e7"} Dec 03 18:11:55 crc kubenswrapper[5002]: I1203 18:11:55.526514 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.526494049 podStartE2EDuration="2.526494049s" podCreationTimestamp="2025-12-03 18:11:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:11:55.511961459 +0000 UTC m=+6038.925783437" watchObservedRunningTime="2025-12-03 18:11:55.526494049 +0000 UTC m=+6038.940315937" Dec 03 18:11:56 crc kubenswrapper[5002]: I1203 18:11:56.939930 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59dfd5fdc9-m86d8" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.007717 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77df6d7ff9-l944g"] Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.008295 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerName="dnsmasq-dns" 
containerID="cri-o://de5a4bbe1a6a67967de86ec2b7070676b671e9d6acbd35930c0a654261bbf5f4" gracePeriod=10 Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.515143 5002 generic.go:334] "Generic (PLEG): container finished" podID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerID="de5a4bbe1a6a67967de86ec2b7070676b671e9d6acbd35930c0a654261bbf5f4" exitCode=0 Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.515198 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" event={"ID":"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965","Type":"ContainerDied","Data":"de5a4bbe1a6a67967de86ec2b7070676b671e9d6acbd35930c0a654261bbf5f4"} Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.515229 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" event={"ID":"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965","Type":"ContainerDied","Data":"53b3e8a385e356989694594536d46b97a1c1d7728634fbad89dc6396a0167d4c"} Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.515242 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53b3e8a385e356989694594536d46b97a1c1d7728634fbad89dc6396a0167d4c" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.540284 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.623376 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-nb\") pod \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.623923 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-config\") pod \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.623986 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-dns-svc\") pod \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.624020 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-sb\") pod \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.624779 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjq7c\" (UniqueName: \"kubernetes.io/projected/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-kube-api-access-wjq7c\") pod \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\" (UID: \"8b7e9b68-a2b3-40b1-9b3c-6d7e91978965\") " Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.631980 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-kube-api-access-wjq7c" (OuterVolumeSpecName: "kube-api-access-wjq7c") pod "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" (UID: "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965"). InnerVolumeSpecName "kube-api-access-wjq7c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.688558 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" (UID: "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.700531 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-config" (OuterVolumeSpecName: "config") pod "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" (UID: "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.700822 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" (UID: "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.709323 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" (UID: "8b7e9b68-a2b3-40b1-9b3c-6d7e91978965"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.726652 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.726860 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.726949 5002 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.727005 5002 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:57 crc kubenswrapper[5002]: I1203 18:11:57.727338 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjq7c\" (UniqueName: \"kubernetes.io/projected/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965-kube-api-access-wjq7c\") on node \"crc\" DevicePath \"\"" Dec 03 18:11:58 crc kubenswrapper[5002]: I1203 18:11:58.523209 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77df6d7ff9-l944g" Dec 03 18:11:58 crc kubenswrapper[5002]: I1203 18:11:58.577178 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77df6d7ff9-l944g"] Dec 03 18:11:58 crc kubenswrapper[5002]: I1203 18:11:58.584679 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77df6d7ff9-l944g"] Dec 03 18:11:58 crc kubenswrapper[5002]: I1203 18:11:58.854917 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" path="/var/lib/kubelet/pods/8b7e9b68-a2b3-40b1-9b3c-6d7e91978965/volumes" Dec 03 18:12:02 crc kubenswrapper[5002]: I1203 18:12:02.841625 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:12:02 crc kubenswrapper[5002]: E1203 18:12:02.842160 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:12:03 crc kubenswrapper[5002]: I1203 18:12:03.881859 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 18:12:03 crc kubenswrapper[5002]: I1203 18:12:03.882271 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 18:12:04 crc kubenswrapper[5002]: I1203 18:12:04.901003 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="db27d6a5-7d81-448f-9aa1-eff2c429c52c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.91:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 18:12:04 crc kubenswrapper[5002]: I1203 18:12:04.901040 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="db27d6a5-7d81-448f-9aa1-eff2c429c52c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.91:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 18:12:13 crc kubenswrapper[5002]: I1203 18:12:13.889047 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 18:12:13 crc kubenswrapper[5002]: I1203 18:12:13.890663 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 18:12:13 crc kubenswrapper[5002]: I1203 18:12:13.894705 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 18:12:13 crc kubenswrapper[5002]: I1203 18:12:13.899152 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 18:12:14 crc kubenswrapper[5002]: I1203 18:12:14.688854 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 18:12:14 crc kubenswrapper[5002]: I1203 18:12:14.697065 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 18:12:15 crc kubenswrapper[5002]: I1203 18:12:15.841367 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:12:15 crc 
kubenswrapper[5002]: E1203 18:12:15.843141 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:12:30 crc kubenswrapper[5002]: I1203 18:12:30.840791 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:12:30 crc kubenswrapper[5002]: E1203 18:12:30.841666 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.459812 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-9l9t9"] Dec 03 18:12:37 crc kubenswrapper[5002]: E1203 18:12:37.460945 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerName="dnsmasq-dns" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.460966 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerName="dnsmasq-dns" Dec 03 18:12:37 crc kubenswrapper[5002]: E1203 18:12:37.460992 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerName="init" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.460999 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerName="init" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.461230 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7e9b68-a2b3-40b1-9b3c-6d7e91978965" containerName="dnsmasq-dns" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.462960 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.470505 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-hjn9k" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.470813 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.471838 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-rncbs"] Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.473362 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.478064 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.497422 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rncbs"] Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.517053 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-9l9t9"] Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535330 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d660dfd-a3db-4786-a42e-33169069d286-scripts\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535381 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq9v9\" (UniqueName: \"kubernetes.io/projected/0d660dfd-a3db-4786-a42e-33169069d286-kube-api-access-wq9v9\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535415 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5d56718-3545-46f8-a9f5-a457c4255b0d-ovn-controller-tls-certs\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535460 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v2t9\" (UniqueName: \"kubernetes.io/projected/e5d56718-3545-46f8-a9f5-a457c4255b0d-kube-api-access-5v2t9\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535501 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-log\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535551 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-lib\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535581 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-log-ovn\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535609 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-run\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535642 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-run\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535675 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-etc-ovs\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535699 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e5d56718-3545-46f8-a9f5-a457c4255b0d-scripts\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535875 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-run-ovn\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.535991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d56718-3545-46f8-a9f5-a457c4255b0d-combined-ca-bundle\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637456 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-log-ovn\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637768 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-run\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637805 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-run\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637834 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-etc-ovs\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " 
pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637860 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e5d56718-3545-46f8-a9f5-a457c4255b0d-scripts\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637892 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-run-ovn\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637936 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d56718-3545-46f8-a9f5-a457c4255b0d-combined-ca-bundle\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.637990 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d660dfd-a3db-4786-a42e-33169069d286-scripts\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638036 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq9v9\" (UniqueName: \"kubernetes.io/projected/0d660dfd-a3db-4786-a42e-33169069d286-kube-api-access-wq9v9\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638072 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5d56718-3545-46f8-a9f5-a457c4255b0d-ovn-controller-tls-certs\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638110 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v2t9\" (UniqueName: \"kubernetes.io/projected/e5d56718-3545-46f8-a9f5-a457c4255b0d-kube-api-access-5v2t9\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638149 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-log\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638199 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-lib\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638208 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-run\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638261 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-log-ovn\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638305 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-etc-ovs\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638672 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-log\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638707 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-run\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638737 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e5d56718-3545-46f8-a9f5-a457c4255b0d-var-run-ovn\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.638818 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d660dfd-a3db-4786-a42e-33169069d286-var-lib\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.640403 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d660dfd-a3db-4786-a42e-33169069d286-scripts\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.644068 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e5d56718-3545-46f8-a9f5-a457c4255b0d-scripts\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.652802 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5d56718-3545-46f8-a9f5-a457c4255b0d-ovn-controller-tls-certs\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs" Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.655687 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d56718-3545-46f8-a9f5-a457c4255b0d-combined-ca-bundle\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs"
Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.658178 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v2t9\" (UniqueName: \"kubernetes.io/projected/e5d56718-3545-46f8-a9f5-a457c4255b0d-kube-api-access-5v2t9\") pod \"ovn-controller-rncbs\" (UID: \"e5d56718-3545-46f8-a9f5-a457c4255b0d\") " pod="openstack/ovn-controller-rncbs"
Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.669472 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq9v9\" (UniqueName: \"kubernetes.io/projected/0d660dfd-a3db-4786-a42e-33169069d286-kube-api-access-wq9v9\") pod \"ovn-controller-ovs-9l9t9\" (UID: \"0d660dfd-a3db-4786-a42e-33169069d286\") " pod="openstack/ovn-controller-ovs-9l9t9"
Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.794936 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-9l9t9"
Dec 03 18:12:37 crc kubenswrapper[5002]: I1203 18:12:37.805168 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rncbs"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.370732 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rncbs"]
Dec 03 18:12:38 crc kubenswrapper[5002]: W1203 18:12:38.720600 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d660dfd_a3db_4786_a42e_33169069d286.slice/crio-083d77943242de50a354a06684ce432c86ef5c1f08db945c3583f9c1d16072af WatchSource:0}: Error finding container 083d77943242de50a354a06684ce432c86ef5c1f08db945c3583f9c1d16072af: Status 404 returned error can't find the container with id 083d77943242de50a354a06684ce432c86ef5c1f08db945c3583f9c1d16072af
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.724075 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-9l9t9"]
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.867271 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-6hmtl"]
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.880260 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-6hmtl"]
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.880383 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.882969 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.967698 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9l9t9" event={"ID":"0d660dfd-a3db-4786-a42e-33169069d286","Type":"ContainerStarted","Data":"083d77943242de50a354a06684ce432c86ef5c1f08db945c3583f9c1d16072af"}
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.969674 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rncbs" event={"ID":"e5d56718-3545-46f8-a9f5-a457c4255b0d","Type":"ContainerStarted","Data":"e3e8257a81001841fe2747a7f57dd36183ff7fae8e547428aca4e66a8805d99a"}
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.976434 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/29c9f160-f344-443a-ad8b-854333e89938-ovs-rundir\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.976501 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29c9f160-f344-443a-ad8b-854333e89938-config\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.976726 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/29c9f160-f344-443a-ad8b-854333e89938-ovn-rundir\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.977247 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c9f160-f344-443a-ad8b-854333e89938-combined-ca-bundle\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.977605 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wbzw\" (UniqueName: \"kubernetes.io/projected/29c9f160-f344-443a-ad8b-854333e89938-kube-api-access-9wbzw\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:38 crc kubenswrapper[5002]: I1203 18:12:38.977852 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29c9f160-f344-443a-ad8b-854333e89938-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080149 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c9f160-f344-443a-ad8b-854333e89938-combined-ca-bundle\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080237 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wbzw\" (UniqueName: \"kubernetes.io/projected/29c9f160-f344-443a-ad8b-854333e89938-kube-api-access-9wbzw\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080290 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29c9f160-f344-443a-ad8b-854333e89938-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080318 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/29c9f160-f344-443a-ad8b-854333e89938-ovs-rundir\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080347 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29c9f160-f344-443a-ad8b-854333e89938-config\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080375 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/29c9f160-f344-443a-ad8b-854333e89938-ovn-rundir\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080742 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/29c9f160-f344-443a-ad8b-854333e89938-ovn-rundir\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.080773 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/29c9f160-f344-443a-ad8b-854333e89938-ovs-rundir\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.081197 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29c9f160-f344-443a-ad8b-854333e89938-config\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.086092 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c9f160-f344-443a-ad8b-854333e89938-combined-ca-bundle\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.086429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29c9f160-f344-443a-ad8b-854333e89938-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.100691 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wbzw\" (UniqueName: \"kubernetes.io/projected/29c9f160-f344-443a-ad8b-854333e89938-kube-api-access-9wbzw\") pod \"ovn-controller-metrics-6hmtl\" (UID: \"29c9f160-f344-443a-ad8b-854333e89938\") " pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.201566 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-6hmtl"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.469250 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-pgbtx"]
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.471397 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.482876 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-pgbtx"]
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.589280 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl7ql\" (UniqueName: \"kubernetes.io/projected/e0a46695-010b-4d5a-96a7-d5e765f77829-kube-api-access-dl7ql\") pod \"octavia-db-create-pgbtx\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") " pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.589646 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a46695-010b-4d5a-96a7-d5e765f77829-operator-scripts\") pod \"octavia-db-create-pgbtx\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") " pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.692782 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl7ql\" (UniqueName: \"kubernetes.io/projected/e0a46695-010b-4d5a-96a7-d5e765f77829-kube-api-access-dl7ql\") pod \"octavia-db-create-pgbtx\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") " pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.692843 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a46695-010b-4d5a-96a7-d5e765f77829-operator-scripts\") pod \"octavia-db-create-pgbtx\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") " pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.693784 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a46695-010b-4d5a-96a7-d5e765f77829-operator-scripts\") pod \"octavia-db-create-pgbtx\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") " pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.719429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl7ql\" (UniqueName: \"kubernetes.io/projected/e0a46695-010b-4d5a-96a7-d5e765f77829-kube-api-access-dl7ql\") pod \"octavia-db-create-pgbtx\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") " pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.832926 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:39 crc kubenswrapper[5002]: I1203 18:12:39.851313 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-6hmtl"]
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.007931 5002 generic.go:334] "Generic (PLEG): container finished" podID="0d660dfd-a3db-4786-a42e-33169069d286" containerID="6b23228219c7739e7f4231b91a05e569de55dea6e2d68cba254dedd00240d2fe" exitCode=0
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.009016 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9l9t9" event={"ID":"0d660dfd-a3db-4786-a42e-33169069d286","Type":"ContainerDied","Data":"6b23228219c7739e7f4231b91a05e569de55dea6e2d68cba254dedd00240d2fe"}
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.014618 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-6hmtl" event={"ID":"29c9f160-f344-443a-ad8b-854333e89938","Type":"ContainerStarted","Data":"17ef6a4f26250e4d8ecb0b4a740f1d4c7b246549d2300ec81eac4054779a04b3"}
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.043172 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rncbs" event={"ID":"e5d56718-3545-46f8-a9f5-a457c4255b0d","Type":"ContainerStarted","Data":"3e61589d67a06b9c198b08861a1083932cb11bf475d6524200e6f37cda57123d"}
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.044071 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-rncbs"
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.065035 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-rncbs" podStartSLOduration=3.065011831 podStartE2EDuration="3.065011831s" podCreationTimestamp="2025-12-03 18:12:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:12:40.064080487 +0000 UTC m=+6083.477902375" watchObservedRunningTime="2025-12-03 18:12:40.065011831 +0000 UTC m=+6083.478833719"
Dec 03 18:12:40 crc kubenswrapper[5002]: I1203 18:12:40.361975 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-pgbtx"]
Dec 03 18:12:40 crc kubenswrapper[5002]: W1203 18:12:40.370214 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0a46695_010b_4d5a_96a7_d5e765f77829.slice/crio-a4528992145e7ce81c92d288493eb1f68f628160a816c53c6e6b6773ead96075 WatchSource:0}: Error finding container a4528992145e7ce81c92d288493eb1f68f628160a816c53c6e6b6773ead96075: Status 404 returned error can't find the container with id a4528992145e7ce81c92d288493eb1f68f628160a816c53c6e6b6773ead96075
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.052363 5002 generic.go:334] "Generic (PLEG): container finished" podID="e0a46695-010b-4d5a-96a7-d5e765f77829" containerID="25cac7748370be4fff083e629514384611540c94e73c0ba6fdc51aa139aaa2b5" exitCode=0
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.052417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-pgbtx" event={"ID":"e0a46695-010b-4d5a-96a7-d5e765f77829","Type":"ContainerDied","Data":"25cac7748370be4fff083e629514384611540c94e73c0ba6fdc51aa139aaa2b5"}
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.052727 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-pgbtx" event={"ID":"e0a46695-010b-4d5a-96a7-d5e765f77829","Type":"ContainerStarted","Data":"a4528992145e7ce81c92d288493eb1f68f628160a816c53c6e6b6773ead96075"}
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.055847 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9l9t9" event={"ID":"0d660dfd-a3db-4786-a42e-33169069d286","Type":"ContainerStarted","Data":"8a63e9b6d028c0ad2014339a89e329977d657b3ece70822b8340c804a6059588"}
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.055878 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9l9t9" event={"ID":"0d660dfd-a3db-4786-a42e-33169069d286","Type":"ContainerStarted","Data":"a6025bb1ad6055212e8a9fb74c7f588d28359245964ae293f9248610963de5b6"}
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.055910 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-9l9t9"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.055930 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-9l9t9"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.058217 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-6hmtl" event={"ID":"29c9f160-f344-443a-ad8b-854333e89938","Type":"ContainerStarted","Data":"94fff99c01c18a6c4f77a454d5636b02ae4b1e432f50071db0dd7dea59ee72c8"}
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.106664 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-9l9t9" podStartSLOduration=4.106645632 podStartE2EDuration="4.106645632s" podCreationTimestamp="2025-12-03 18:12:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:12:41.100409095 +0000 UTC m=+6084.514230983" watchObservedRunningTime="2025-12-03 18:12:41.106645632 +0000 UTC m=+6084.520467510"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.109128 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-6hmtl" podStartSLOduration=3.109121309 podStartE2EDuration="3.109121309s" podCreationTimestamp="2025-12-03 18:12:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:12:41.081699361 +0000 UTC m=+6084.495521269" watchObservedRunningTime="2025-12-03 18:12:41.109121309 +0000 UTC m=+6084.522943197"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.313806 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-074b-account-create-update-76rsl"]
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.317904 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.321581 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.329899 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76498b2c-a00b-48ef-b1b2-52ed87922f4d-operator-scripts\") pod \"octavia-074b-account-create-update-76rsl\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") " pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.330338 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jbjl\" (UniqueName: \"kubernetes.io/projected/76498b2c-a00b-48ef-b1b2-52ed87922f4d-kube-api-access-7jbjl\") pod \"octavia-074b-account-create-update-76rsl\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") " pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.354399 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-074b-account-create-update-76rsl"]
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.432257 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jbjl\" (UniqueName: \"kubernetes.io/projected/76498b2c-a00b-48ef-b1b2-52ed87922f4d-kube-api-access-7jbjl\") pod \"octavia-074b-account-create-update-76rsl\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") " pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.432373 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76498b2c-a00b-48ef-b1b2-52ed87922f4d-operator-scripts\") pod \"octavia-074b-account-create-update-76rsl\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") " pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.433280 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76498b2c-a00b-48ef-b1b2-52ed87922f4d-operator-scripts\") pod \"octavia-074b-account-create-update-76rsl\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") " pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.453251 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jbjl\" (UniqueName: \"kubernetes.io/projected/76498b2c-a00b-48ef-b1b2-52ed87922f4d-kube-api-access-7jbjl\") pod \"octavia-074b-account-create-update-76rsl\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") " pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.691826 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:41 crc kubenswrapper[5002]: I1203 18:12:41.840314 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"
Dec 03 18:12:41 crc kubenswrapper[5002]: E1203 18:12:41.840947 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.174545 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-074b-account-create-update-76rsl"]
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.409950 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.551850 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a46695-010b-4d5a-96a7-d5e765f77829-operator-scripts\") pod \"e0a46695-010b-4d5a-96a7-d5e765f77829\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") "
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.552153 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl7ql\" (UniqueName: \"kubernetes.io/projected/e0a46695-010b-4d5a-96a7-d5e765f77829-kube-api-access-dl7ql\") pod \"e0a46695-010b-4d5a-96a7-d5e765f77829\" (UID: \"e0a46695-010b-4d5a-96a7-d5e765f77829\") "
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.552618 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0a46695-010b-4d5a-96a7-d5e765f77829-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0a46695-010b-4d5a-96a7-d5e765f77829" (UID: "e0a46695-010b-4d5a-96a7-d5e765f77829"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.553108 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a46695-010b-4d5a-96a7-d5e765f77829-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.557295 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0a46695-010b-4d5a-96a7-d5e765f77829-kube-api-access-dl7ql" (OuterVolumeSpecName: "kube-api-access-dl7ql") pod "e0a46695-010b-4d5a-96a7-d5e765f77829" (UID: "e0a46695-010b-4d5a-96a7-d5e765f77829"). InnerVolumeSpecName "kube-api-access-dl7ql". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:12:42 crc kubenswrapper[5002]: I1203 18:12:42.655307 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl7ql\" (UniqueName: \"kubernetes.io/projected/e0a46695-010b-4d5a-96a7-d5e765f77829-kube-api-access-dl7ql\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:43 crc kubenswrapper[5002]: I1203 18:12:43.080260 5002 generic.go:334] "Generic (PLEG): container finished" podID="76498b2c-a00b-48ef-b1b2-52ed87922f4d" containerID="d18307feb766083fb7706fc9f091a550449b218e8af9efd6e450b86141af2149" exitCode=0
Dec 03 18:12:43 crc kubenswrapper[5002]: I1203 18:12:43.080341 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-074b-account-create-update-76rsl" event={"ID":"76498b2c-a00b-48ef-b1b2-52ed87922f4d","Type":"ContainerDied","Data":"d18307feb766083fb7706fc9f091a550449b218e8af9efd6e450b86141af2149"}
Dec 03 18:12:43 crc kubenswrapper[5002]: I1203 18:12:43.080368 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-074b-account-create-update-76rsl" event={"ID":"76498b2c-a00b-48ef-b1b2-52ed87922f4d","Type":"ContainerStarted","Data":"16b1d3fab374c13c20e7707996d61e6e13caf312dd906731d3224da575313859"}
Dec 03 18:12:43 crc kubenswrapper[5002]: I1203 18:12:43.082679 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-pgbtx" event={"ID":"e0a46695-010b-4d5a-96a7-d5e765f77829","Type":"ContainerDied","Data":"a4528992145e7ce81c92d288493eb1f68f628160a816c53c6e6b6773ead96075"}
Dec 03 18:12:43 crc kubenswrapper[5002]: I1203 18:12:43.082710 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4528992145e7ce81c92d288493eb1f68f628160a816c53c6e6b6773ead96075"
Dec 03 18:12:43 crc kubenswrapper[5002]: I1203 18:12:43.082738 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-pgbtx"
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.513686 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.696459 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jbjl\" (UniqueName: \"kubernetes.io/projected/76498b2c-a00b-48ef-b1b2-52ed87922f4d-kube-api-access-7jbjl\") pod \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") "
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.696936 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76498b2c-a00b-48ef-b1b2-52ed87922f4d-operator-scripts\") pod \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\" (UID: \"76498b2c-a00b-48ef-b1b2-52ed87922f4d\") "
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.697429 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76498b2c-a00b-48ef-b1b2-52ed87922f4d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76498b2c-a00b-48ef-b1b2-52ed87922f4d" (UID: "76498b2c-a00b-48ef-b1b2-52ed87922f4d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.697963 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76498b2c-a00b-48ef-b1b2-52ed87922f4d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.704014 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76498b2c-a00b-48ef-b1b2-52ed87922f4d-kube-api-access-7jbjl" (OuterVolumeSpecName: "kube-api-access-7jbjl") pod "76498b2c-a00b-48ef-b1b2-52ed87922f4d" (UID: "76498b2c-a00b-48ef-b1b2-52ed87922f4d"). InnerVolumeSpecName "kube-api-access-7jbjl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:12:44 crc kubenswrapper[5002]: I1203 18:12:44.799985 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jbjl\" (UniqueName: \"kubernetes.io/projected/76498b2c-a00b-48ef-b1b2-52ed87922f4d-kube-api-access-7jbjl\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.101254 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7f6a-account-create-update-lxrjp"]
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.110007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-074b-account-create-update-76rsl" event={"ID":"76498b2c-a00b-48ef-b1b2-52ed87922f4d","Type":"ContainerDied","Data":"16b1d3fab374c13c20e7707996d61e6e13caf312dd906731d3224da575313859"}
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.110063 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16b1d3fab374c13c20e7707996d61e6e13caf312dd906731d3224da575313859"
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.110133 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-074b-account-create-update-76rsl"
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.118634 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-fmxnp"]
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.129999 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7f6a-account-create-update-lxrjp"]
Dec 03 18:12:45 crc kubenswrapper[5002]: I1203 18:12:45.142613 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-fmxnp"]
Dec 03 18:12:46 crc kubenswrapper[5002]: I1203 18:12:46.856396 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7a9fcc4-d145-47aa-bc44-f31309080419" path="/var/lib/kubelet/pods/a7a9fcc4-d145-47aa-bc44-f31309080419/volumes"
Dec 03 18:12:46 crc kubenswrapper[5002]: I1203 18:12:46.857784 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f" path="/var/lib/kubelet/pods/caa55ef5-b64d-4782-9f9a-9ae57c8ecd5f/volumes"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.362947 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-t9vk8"]
Dec 03 18:12:47 crc kubenswrapper[5002]: E1203 18:12:47.363609 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0a46695-010b-4d5a-96a7-d5e765f77829" containerName="mariadb-database-create"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.365074 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0a46695-010b-4d5a-96a7-d5e765f77829" containerName="mariadb-database-create"
Dec 03 18:12:47 crc kubenswrapper[5002]: E1203 18:12:47.365144 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76498b2c-a00b-48ef-b1b2-52ed87922f4d" containerName="mariadb-account-create-update"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.365152 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="76498b2c-a00b-48ef-b1b2-52ed87922f4d" containerName="mariadb-account-create-update"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.365374 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0a46695-010b-4d5a-96a7-d5e765f77829" containerName="mariadb-database-create"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.365396 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="76498b2c-a00b-48ef-b1b2-52ed87922f4d" containerName="mariadb-account-create-update"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.366081 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.373167 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-t9vk8"]
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.411789 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sthx\" (UniqueName: \"kubernetes.io/projected/e9403328-ad80-45b1-96aa-3216038c0752-kube-api-access-8sthx\") pod \"octavia-persistence-db-create-t9vk8\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") " pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.412065 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9403328-ad80-45b1-96aa-3216038c0752-operator-scripts\") pod \"octavia-persistence-db-create-t9vk8\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") " pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.514141 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9403328-ad80-45b1-96aa-3216038c0752-operator-scripts\") pod \"octavia-persistence-db-create-t9vk8\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") " pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.514293 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sthx\" (UniqueName: \"kubernetes.io/projected/e9403328-ad80-45b1-96aa-3216038c0752-kube-api-access-8sthx\") pod \"octavia-persistence-db-create-t9vk8\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") " pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.514973 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9403328-ad80-45b1-96aa-3216038c0752-operator-scripts\") pod \"octavia-persistence-db-create-t9vk8\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") " pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.538397 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sthx\" (UniqueName: \"kubernetes.io/projected/e9403328-ad80-45b1-96aa-3216038c0752-kube-api-access-8sthx\") pod \"octavia-persistence-db-create-t9vk8\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") " pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:47 crc kubenswrapper[5002]: I1203 18:12:47.689232 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.153644 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-t9vk8"]
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.303345 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-61c3-account-create-update-lpqs7"]
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.305118 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.307679 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.317405 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-61c3-account-create-update-lpqs7"]
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.337862 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-operator-scripts\") pod \"octavia-61c3-account-create-update-lpqs7\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") " pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.337984 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsn4w\" (UniqueName: \"kubernetes.io/projected/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-kube-api-access-gsn4w\") pod \"octavia-61c3-account-create-update-lpqs7\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") " pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.439870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-operator-scripts\") pod \"octavia-61c3-account-create-update-lpqs7\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") " pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.439971 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsn4w\" (UniqueName: \"kubernetes.io/projected/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-kube-api-access-gsn4w\") pod \"octavia-61c3-account-create-update-lpqs7\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") " pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.441341 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-operator-scripts\") pod \"octavia-61c3-account-create-update-lpqs7\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") " pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.463907 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsn4w\" (UniqueName: \"kubernetes.io/projected/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-kube-api-access-gsn4w\") pod \"octavia-61c3-account-create-update-lpqs7\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") " pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:48 crc kubenswrapper[5002]: I1203 18:12:48.624602 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:49 crc kubenswrapper[5002]: I1203 18:12:49.130807 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-61c3-account-create-update-lpqs7"]
Dec 03 18:12:49 crc kubenswrapper[5002]: I1203 18:12:49.165702 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-61c3-account-create-update-lpqs7" event={"ID":"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d","Type":"ContainerStarted","Data":"c2448b0729bef8e0dc7518d297aeb17f0a8836a259b4616a8692478b15c7fe3e"}
Dec 03 18:12:49 crc kubenswrapper[5002]: I1203 18:12:49.170693 5002 generic.go:334] "Generic (PLEG): container finished" podID="e9403328-ad80-45b1-96aa-3216038c0752" containerID="d3e8163d0b181d68d33c703c781c072b6418bfbcc16a8541e4c7d0984467b000" exitCode=0
Dec 03 18:12:49 crc kubenswrapper[5002]: I1203 18:12:49.170763 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-t9vk8" event={"ID":"e9403328-ad80-45b1-96aa-3216038c0752","Type":"ContainerDied","Data":"d3e8163d0b181d68d33c703c781c072b6418bfbcc16a8541e4c7d0984467b000"}
Dec 03 18:12:49 crc kubenswrapper[5002]: I1203 18:12:49.170791 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-t9vk8" event={"ID":"e9403328-ad80-45b1-96aa-3216038c0752","Type":"ContainerStarted","Data":"8041c2213245268db1a60f16ce56b0b67f91c4a2455cf16548d690b7c13c7110"}
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.184027 5002 generic.go:334] "Generic (PLEG): container finished" podID="1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" containerID="b45b06fc50de951c4a57e6f843f221cf0b7f8fd642684cb159e66e13b9acac0a" exitCode=0
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.184086 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-61c3-account-create-update-lpqs7" event={"ID":"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d","Type":"ContainerDied","Data":"b45b06fc50de951c4a57e6f843f221cf0b7f8fd642684cb159e66e13b9acac0a"}
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.567347 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.679199 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9403328-ad80-45b1-96aa-3216038c0752-operator-scripts\") pod \"e9403328-ad80-45b1-96aa-3216038c0752\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") "
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.679323 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sthx\" (UniqueName: \"kubernetes.io/projected/e9403328-ad80-45b1-96aa-3216038c0752-kube-api-access-8sthx\") pod \"e9403328-ad80-45b1-96aa-3216038c0752\" (UID: \"e9403328-ad80-45b1-96aa-3216038c0752\") "
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.679843 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9403328-ad80-45b1-96aa-3216038c0752-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e9403328-ad80-45b1-96aa-3216038c0752" (UID: "e9403328-ad80-45b1-96aa-3216038c0752"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.680383 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9403328-ad80-45b1-96aa-3216038c0752-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.685143 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9403328-ad80-45b1-96aa-3216038c0752-kube-api-access-8sthx" (OuterVolumeSpecName: "kube-api-access-8sthx") pod "e9403328-ad80-45b1-96aa-3216038c0752" (UID: "e9403328-ad80-45b1-96aa-3216038c0752"). InnerVolumeSpecName "kube-api-access-8sthx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:12:50 crc kubenswrapper[5002]: I1203 18:12:50.783194 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sthx\" (UniqueName: \"kubernetes.io/projected/e9403328-ad80-45b1-96aa-3216038c0752-kube-api-access-8sthx\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.201475 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-t9vk8" event={"ID":"e9403328-ad80-45b1-96aa-3216038c0752","Type":"ContainerDied","Data":"8041c2213245268db1a60f16ce56b0b67f91c4a2455cf16548d690b7c13c7110"}
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.202133 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8041c2213245268db1a60f16ce56b0b67f91c4a2455cf16548d690b7c13c7110"
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.201556 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-t9vk8"
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.623509 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.705529 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-operator-scripts\") pod \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") "
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.706163 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" (UID: "1addd8f3-d5dd-49dd-a2ee-0846cb3e380d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.706163 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsn4w\" (UniqueName: \"kubernetes.io/projected/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-kube-api-access-gsn4w\") pod \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\" (UID: \"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d\") "
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.706554 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.712932 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-kube-api-access-gsn4w" (OuterVolumeSpecName: "kube-api-access-gsn4w") pod "1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" (UID: "1addd8f3-d5dd-49dd-a2ee-0846cb3e380d"). InnerVolumeSpecName "kube-api-access-gsn4w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:12:51 crc kubenswrapper[5002]: I1203 18:12:51.809047 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsn4w\" (UniqueName: \"kubernetes.io/projected/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d-kube-api-access-gsn4w\") on node \"crc\" DevicePath \"\""
Dec 03 18:12:52 crc kubenswrapper[5002]: I1203 18:12:52.041901 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-zth4g"]
Dec 03 18:12:52 crc kubenswrapper[5002]: I1203 18:12:52.069651 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-zth4g"]
Dec 03 18:12:52 crc kubenswrapper[5002]: I1203 18:12:52.214035 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-61c3-account-create-update-lpqs7" event={"ID":"1addd8f3-d5dd-49dd-a2ee-0846cb3e380d","Type":"ContainerDied","Data":"c2448b0729bef8e0dc7518d297aeb17f0a8836a259b4616a8692478b15c7fe3e"}
Dec 03 18:12:52 crc kubenswrapper[5002]: I1203 18:12:52.214092 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2448b0729bef8e0dc7518d297aeb17f0a8836a259b4616a8692478b15c7fe3e"
Dec 03 18:12:52 crc kubenswrapper[5002]: I1203 18:12:52.214164 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-61c3-account-create-update-lpqs7"
Dec 03 18:12:52 crc kubenswrapper[5002]: I1203 18:12:52.853176 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05c8eebd-bf20-45d1-9c0b-42480190c8c9" path="/var/lib/kubelet/pods/05c8eebd-bf20-45d1-9c0b-42480190c8c9/volumes"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.055150 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-75c7c68f7d-662gl"]
Dec 03 18:12:54 crc kubenswrapper[5002]: E1203 18:12:54.055942 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9403328-ad80-45b1-96aa-3216038c0752" containerName="mariadb-database-create"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.055957 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9403328-ad80-45b1-96aa-3216038c0752" containerName="mariadb-database-create"
Dec 03 18:12:54 crc kubenswrapper[5002]: E1203 18:12:54.055979 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" containerName="mariadb-account-create-update"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.055987 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" containerName="mariadb-account-create-update"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.056231 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9403328-ad80-45b1-96aa-3216038c0752" containerName="mariadb-database-create"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.056247 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" containerName="mariadb-account-create-update"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.057683 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.063893 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-hsg54"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.064044 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-octavia-ovndbs"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.064159 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.064275 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.090474 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-75c7c68f7d-662gl"]
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.150138 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-combined-ca-bundle\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.150258 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data-merged\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.150316 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-scripts\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.150436 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-ovndb-tls-certs\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.150530 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-octavia-run\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.150602 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.252238 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-scripts\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.252517 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-ovndb-tls-certs\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.252713 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-octavia-run\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.252872 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.253049 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-combined-ca-bundle\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.253195 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data-merged\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.253881 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data-merged\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.255265 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-octavia-run\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.260356 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-combined-ca-bundle\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.260724 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.261385 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-scripts\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.261418 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-ovndb-tls-certs\") pod \"octavia-api-75c7c68f7d-662gl\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.383855 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.840711 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"
Dec 03 18:12:54 crc kubenswrapper[5002]: E1203 18:12:54.841436 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:12:54 crc kubenswrapper[5002]: I1203 18:12:54.902704 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-75c7c68f7d-662gl"]
Dec 03 18:12:55 crc kubenswrapper[5002]: I1203 18:12:55.245166 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerStarted","Data":"1d61a233593729f3cbdbf4753b186308ef8617c42a27f9d0af1b3f9d733d8a40"}
Dec 03 18:13:04 crc kubenswrapper[5002]: I1203 18:13:04.327496 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerID="68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4" exitCode=0
Dec 03 18:13:04 crc kubenswrapper[5002]: I1203 18:13:04.327601 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerDied","Data":"68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4"}
Dec 03 18:13:05 crc kubenswrapper[5002]: I1203 18:13:05.342355 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerStarted","Data":"80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e"}
Dec 03 18:13:05 crc kubenswrapper[5002]: I1203 18:13:05.342424 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerStarted","Data":"f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739"}
Dec 03 18:13:05 crc kubenswrapper[5002]: I1203 18:13:05.343961 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:13:05 crc kubenswrapper[5002]: I1203 18:13:05.344048 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-75c7c68f7d-662gl"
Dec 03 18:13:05 crc kubenswrapper[5002]: I1203 18:13:05.369211 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-75c7c68f7d-662gl" podStartSLOduration=2.778706418 podStartE2EDuration="11.36918863s" podCreationTimestamp="2025-12-03 18:12:54 +0000 UTC" firstStartedPulling="2025-12-03 18:12:54.900309005 +0000 UTC m=+6098.314130893" lastFinishedPulling="2025-12-03 18:13:03.490791217 +0000 UTC m=+6106.904613105" observedRunningTime="2025-12-03 18:13:05.363188819 +0000 UTC m=+6108.777010707" watchObservedRunningTime="2025-12-03 18:13:05.36918863 +0000 UTC m=+6108.783010518"
Dec 03 18:13:05 crc kubenswrapper[5002]: I1203 18:13:05.841203 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"
Dec 03 18:13:05 crc kubenswrapper[5002]: E1203 18:13:05.841403 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:13:06 crc kubenswrapper[5002]: I1203 18:13:06.042710 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tj5bt"]
Dec 03 18:13:06 crc kubenswrapper[5002]: I1203 18:13:06.051789 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tj5bt"]
Dec 03 18:13:06 crc kubenswrapper[5002]: I1203 18:13:06.852124 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2808c3ea-5a81-4602-8353-bee8f8d8c453" path="/var/lib/kubelet/pods/2808c3ea-5a81-4602-8353-bee8f8d8c453/volumes"
Dec 03 18:13:12 crc kubenswrapper[5002]: I1203 18:13:12.864247 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-9l9t9"
Dec 03 18:13:12 crc kubenswrapper[5002]: I1203 18:13:12.868009 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-rncbs"
Dec 03 18:13:12 crc kubenswrapper[5002]: I1203 18:13:12.872268 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-9l9t9"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.008173 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-rncbs-config-6vjpw"]
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.010383 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.013311 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.041540 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rncbs-config-6vjpw"]
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.068123 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run-ovn\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.068201 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-log-ovn\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.068227 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-additional-scripts\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.068290 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.068424 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-scripts\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.068554 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxhjr\" (UniqueName: \"kubernetes.io/projected/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-kube-api-access-fxhjr\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.169824 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-scripts\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170163 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxhjr\" (UniqueName: \"kubernetes.io/projected/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-kube-api-access-fxhjr\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170253 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run-ovn\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170274 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-log-ovn\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170288 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-additional-scripts\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170315 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170601 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170652 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-log-ovn\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.170738 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run-ovn\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.171480 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-additional-scripts\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw"
Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.173074 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName:
\"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-scripts\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw" Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.222304 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxhjr\" (UniqueName: \"kubernetes.io/projected/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-kube-api-access-fxhjr\") pod \"ovn-controller-rncbs-config-6vjpw\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " pod="openstack/ovn-controller-rncbs-config-6vjpw" Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.347978 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rncbs-config-6vjpw" Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.835674 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-75c7c68f7d-662gl" Dec 03 18:13:13 crc kubenswrapper[5002]: I1203 18:13:13.885449 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rncbs-config-6vjpw"] Dec 03 18:13:14 crc kubenswrapper[5002]: I1203 18:13:14.435558 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rncbs-config-6vjpw" event={"ID":"a9a72ef2-cf65-43bc-b4da-00772c9f99f8","Type":"ContainerStarted","Data":"87dccdb4ff5d9a43a7000fd9d13479fd43c9223a381228068491e97cbaf57f4f"} Dec 03 18:13:14 crc kubenswrapper[5002]: I1203 18:13:14.435985 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rncbs-config-6vjpw" event={"ID":"a9a72ef2-cf65-43bc-b4da-00772c9f99f8","Type":"ContainerStarted","Data":"fcb44c2b5564d932ce050653c47e6f19fbd35b4249b4345c295b5e0166757ac6"} Dec 03 18:13:14 crc kubenswrapper[5002]: I1203 18:13:14.460382 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-rncbs-config-6vjpw" podStartSLOduration=2.46035934 podStartE2EDuration="2.46035934s" podCreationTimestamp="2025-12-03 18:13:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:13:14.452435916 +0000 UTC m=+6117.866257814" watchObservedRunningTime="2025-12-03 18:13:14.46035934 +0000 UTC m=+6117.874181238" Dec 03 18:13:15 crc kubenswrapper[5002]: I1203 18:13:15.449350 5002 generic.go:334] "Generic (PLEG): container finished" podID="a9a72ef2-cf65-43bc-b4da-00772c9f99f8" containerID="87dccdb4ff5d9a43a7000fd9d13479fd43c9223a381228068491e97cbaf57f4f" exitCode=0 Dec 03 18:13:15 crc kubenswrapper[5002]: I1203 18:13:15.449415 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rncbs-config-6vjpw" event={"ID":"a9a72ef2-cf65-43bc-b4da-00772c9f99f8","Type":"ContainerDied","Data":"87dccdb4ff5d9a43a7000fd9d13479fd43c9223a381228068491e97cbaf57f4f"} Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.837174 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-rncbs-config-6vjpw" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.845276 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:13:16 crc kubenswrapper[5002]: E1203 18:13:16.845514 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.946845 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxhjr\" (UniqueName: \"kubernetes.io/projected/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-kube-api-access-fxhjr\") pod \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.946890 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-additional-scripts\") pod \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.947012 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run\") pod \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.947036 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-scripts\") pod \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.947086 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-log-ovn\") pod \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.947152 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run-ovn\") pod \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\" (UID: \"a9a72ef2-cf65-43bc-b4da-00772c9f99f8\") " Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.948488 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a9a72ef2-cf65-43bc-b4da-00772c9f99f8" (UID: "a9a72ef2-cf65-43bc-b4da-00772c9f99f8"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.948602 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run" (OuterVolumeSpecName: "var-run") pod "a9a72ef2-cf65-43bc-b4da-00772c9f99f8" (UID: "a9a72ef2-cf65-43bc-b4da-00772c9f99f8"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.948671 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a9a72ef2-cf65-43bc-b4da-00772c9f99f8" (UID: "a9a72ef2-cf65-43bc-b4da-00772c9f99f8"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.948874 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a9a72ef2-cf65-43bc-b4da-00772c9f99f8" (UID: "a9a72ef2-cf65-43bc-b4da-00772c9f99f8"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.949658 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-scripts" (OuterVolumeSpecName: "scripts") pod "a9a72ef2-cf65-43bc-b4da-00772c9f99f8" (UID: "a9a72ef2-cf65-43bc-b4da-00772c9f99f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:13:16 crc kubenswrapper[5002]: I1203 18:13:16.971129 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-kube-api-access-fxhjr" (OuterVolumeSpecName: "kube-api-access-fxhjr") pod "a9a72ef2-cf65-43bc-b4da-00772c9f99f8" (UID: "a9a72ef2-cf65-43bc-b4da-00772c9f99f8"). InnerVolumeSpecName "kube-api-access-fxhjr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.049701 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxhjr\" (UniqueName: \"kubernetes.io/projected/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-kube-api-access-fxhjr\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.049738 5002 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.049773 5002 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.049785 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.049795 5002 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.049805 5002 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9a72ef2-cf65-43bc-b4da-00772c9f99f8-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.471941 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rncbs-config-6vjpw" event={"ID":"a9a72ef2-cf65-43bc-b4da-00772c9f99f8","Type":"ContainerDied","Data":"fcb44c2b5564d932ce050653c47e6f19fbd35b4249b4345c295b5e0166757ac6"} Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.472292 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcb44c2b5564d932ce050653c47e6f19fbd35b4249b4345c295b5e0166757ac6" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.471981 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-rncbs-config-6vjpw" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.576339 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-rncbs-config-6vjpw"] Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.588088 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-rncbs-config-6vjpw"] Dec 03 18:13:17 crc kubenswrapper[5002]: E1203 18:13:17.622554 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9a72ef2_cf65_43bc_b4da_00772c9f99f8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9a72ef2_cf65_43bc_b4da_00772c9f99f8.slice/crio-fcb44c2b5564d932ce050653c47e6f19fbd35b4249b4345c295b5e0166757ac6\": RecentStats: unable to find data in memory cache]" Dec 03 18:13:17 crc kubenswrapper[5002]: I1203 18:13:17.994589 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-75c7c68f7d-662gl" Dec 03 18:13:18 crc kubenswrapper[5002]: I1203 18:13:18.854018 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a72ef2-cf65-43bc-b4da-00772c9f99f8" path="/var/lib/kubelet/pods/a9a72ef2-cf65-43bc-b4da-00772c9f99f8/volumes" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.896486 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-kn7lw"] Dec 03 18:13:22 crc kubenswrapper[5002]: E1203 18:13:22.897422 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a72ef2-cf65-43bc-b4da-00772c9f99f8" containerName="ovn-config" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.897435 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a72ef2-cf65-43bc-b4da-00772c9f99f8" containerName="ovn-config" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.897618 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a72ef2-cf65-43bc-b4da-00772c9f99f8" containerName="ovn-config" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.898624 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.904363 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.904375 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.904564 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Dec 03 18:13:22 crc kubenswrapper[5002]: I1203 18:13:22.925703 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-kn7lw"] Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.070725 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c01abf35-dc93-4b56-9da5-ed1f312d9402-config-data\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.070810 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/c01abf35-dc93-4b56-9da5-ed1f312d9402-config-data-merged\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.070850 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c01abf35-dc93-4b56-9da5-ed1f312d9402-scripts\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.070879 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/c01abf35-dc93-4b56-9da5-ed1f312d9402-hm-ports\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.172320 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/c01abf35-dc93-4b56-9da5-ed1f312d9402-config-data-merged\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.172397 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c01abf35-dc93-4b56-9da5-ed1f312d9402-scripts\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.172432 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/c01abf35-dc93-4b56-9da5-ed1f312d9402-hm-ports\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.172579 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c01abf35-dc93-4b56-9da5-ed1f312d9402-config-data\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.172865 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/c01abf35-dc93-4b56-9da5-ed1f312d9402-config-data-merged\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.173590 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/c01abf35-dc93-4b56-9da5-ed1f312d9402-hm-ports\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.179113 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c01abf35-dc93-4b56-9da5-ed1f312d9402-config-data\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.188406 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c01abf35-dc93-4b56-9da5-ed1f312d9402-scripts\") pod \"octavia-rsyslog-kn7lw\" (UID: \"c01abf35-dc93-4b56-9da5-ed1f312d9402\") " pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.231519 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.671822 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-lrdhb"] Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.675025 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.677924 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.697930 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-lrdhb"] Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.784201 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6a8fc82-5ff1-404b-85bf-4839d87588d1-httpd-config\") pod \"octavia-image-upload-56c9f55b99-lrdhb\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.784600 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a6a8fc82-5ff1-404b-85bf-4839d87588d1-amphora-image\") pod \"octavia-image-upload-56c9f55b99-lrdhb\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.807326 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-kn7lw"] Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.886363 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6a8fc82-5ff1-404b-85bf-4839d87588d1-httpd-config\") pod \"octavia-image-upload-56c9f55b99-lrdhb\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.886429 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a6a8fc82-5ff1-404b-85bf-4839d87588d1-amphora-image\") pod \"octavia-image-upload-56c9f55b99-lrdhb\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.887001 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a6a8fc82-5ff1-404b-85bf-4839d87588d1-amphora-image\") pod \"octavia-image-upload-56c9f55b99-lrdhb\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:23 crc kubenswrapper[5002]: I1203 18:13:23.900373 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6a8fc82-5ff1-404b-85bf-4839d87588d1-httpd-config\") pod \"octavia-image-upload-56c9f55b99-lrdhb\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.016468 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.392933 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-sps4w"] Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.395440 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.398420 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.425401 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-sps4w"] Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.498623 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data-merged\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.498706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-scripts\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.498798 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.498819 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-combined-ca-bundle\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.530137 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-lrdhb"] Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.544202 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-kn7lw" event={"ID":"c01abf35-dc93-4b56-9da5-ed1f312d9402","Type":"ContainerStarted","Data":"c1df8803b9af326d4a73fdc903bf7d61d31e057f38e7c81e1989c1ff700f96a7"} Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.601159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data-merged\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.601268 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-scripts\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.601322 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc 
kubenswrapper[5002]: I1203 18:13:24.601344 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-combined-ca-bundle\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.601943 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data-merged\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.608063 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-combined-ca-bundle\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.608119 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-scripts\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.608611 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data\") pod \"octavia-db-sync-sps4w\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:24 crc kubenswrapper[5002]: I1203 18:13:24.721097 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:25 crc kubenswrapper[5002]: I1203 18:13:25.251194 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-sps4w"] Dec 03 18:13:25 crc kubenswrapper[5002]: I1203 18:13:25.558104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" event={"ID":"a6a8fc82-5ff1-404b-85bf-4839d87588d1","Type":"ContainerStarted","Data":"dcfa7dd498c990dbc9b333df4108334e4e3fbc668c65127e3b21beb92b6937ae"} Dec 03 18:13:25 crc kubenswrapper[5002]: W1203 18:13:25.575523 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6533ba9f_610a_4fef_8c1b_8a519b0f8957.slice/crio-8108f391c4530b223783484fe3c7344b9d60ce113abba16cd4e6223c9776fe1e WatchSource:0}: Error finding container 8108f391c4530b223783484fe3c7344b9d60ce113abba16cd4e6223c9776fe1e: Status 404 returned error can't find the container with id 8108f391c4530b223783484fe3c7344b9d60ce113abba16cd4e6223c9776fe1e Dec 03 18:13:26 crc kubenswrapper[5002]: I1203 18:13:26.575494 5002 generic.go:334] "Generic (PLEG): container finished" podID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerID="8e47d3edf212aa23036ba85a5e490a98fa1d5d7d8897c2a9a912a1a200e58c69" exitCode=0 Dec 03 18:13:26 crc kubenswrapper[5002]: I1203 18:13:26.575682 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-sps4w" event={"ID":"6533ba9f-610a-4fef-8c1b-8a519b0f8957","Type":"ContainerDied","Data":"8e47d3edf212aa23036ba85a5e490a98fa1d5d7d8897c2a9a912a1a200e58c69"} Dec 03 18:13:26 crc kubenswrapper[5002]: I1203 18:13:26.575940 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-sps4w" event={"ID":"6533ba9f-610a-4fef-8c1b-8a519b0f8957","Type":"ContainerStarted","Data":"8108f391c4530b223783484fe3c7344b9d60ce113abba16cd4e6223c9776fe1e"} Dec 03 18:13:27 crc kubenswrapper[5002]: I1203 18:13:27.588507 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-sps4w" event={"ID":"6533ba9f-610a-4fef-8c1b-8a519b0f8957","Type":"ContainerStarted","Data":"58a94420c54bb12a185c806815cadde017e694e62e91ebb1a13ccb85b91acbb4"} Dec 03 18:13:27 crc kubenswrapper[5002]: I1203 18:13:27.592202 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-kn7lw" event={"ID":"c01abf35-dc93-4b56-9da5-ed1f312d9402","Type":"ContainerStarted","Data":"7991dcba71a29c76a0bd7a4e11244710920c0749d7c65943a689fbaa91f07271"} Dec 03 18:13:27 crc kubenswrapper[5002]: I1203 18:13:27.610068 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-sps4w" podStartSLOduration=3.610019464 podStartE2EDuration="3.610019464s" podCreationTimestamp="2025-12-03 18:13:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:13:27.608859322 +0000 UTC m=+6131.022681210" watchObservedRunningTime="2025-12-03 18:13:27.610019464 +0000 UTC m=+6131.023841342" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.266463 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-b4mlk"] Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.271048 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.278067 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.278195 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.278467 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.279061 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-b4mlk"] Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.380139 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-combined-ca-bundle\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.380202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-config-data\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.380232 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-hm-ports\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.380326 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-amphora-certs\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.380383 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-config-data-merged\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.380431 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-scripts\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.482138 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-config-data\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc 
kubenswrapper[5002]: I1203 18:13:28.482203 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-hm-ports\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.482293 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-amphora-certs\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.482343 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-config-data-merged\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.482429 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-scripts\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.482548 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-combined-ca-bundle\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.483499 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-config-data-merged\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.483765 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-hm-ports\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.488354 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-scripts\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.488422 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-combined-ca-bundle\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.488995 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-config-data\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.491001 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/015396fc-f13a-4bc0-a8e6-6f2a4d6d97db-amphora-certs\") pod \"octavia-healthmanager-b4mlk\" (UID: \"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db\") " pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.591918 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:28 crc kubenswrapper[5002]: I1203 18:13:28.840368 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:13:28 crc kubenswrapper[5002]: E1203 18:13:28.840927 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:13:29 crc kubenswrapper[5002]: I1203 18:13:29.410617 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-b4mlk"] Dec 03 18:13:29 crc kubenswrapper[5002]: I1203 18:13:29.612546 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4mlk" event={"ID":"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db","Type":"ContainerStarted","Data":"d9f35a33db12ce74062f13702d73130aaeab1c2b80f596da65ef4960e5dc5164"} Dec 03 18:13:30 crc kubenswrapper[5002]: I1203 18:13:30.628657 5002 generic.go:334] "Generic (PLEG): container finished" podID="c01abf35-dc93-4b56-9da5-ed1f312d9402" containerID="7991dcba71a29c76a0bd7a4e11244710920c0749d7c65943a689fbaa91f07271" exitCode=0 Dec 03 18:13:30 crc kubenswrapper[5002]: I1203 18:13:30.628962 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-kn7lw" event={"ID":"c01abf35-dc93-4b56-9da5-ed1f312d9402","Type":"ContainerDied","Data":"7991dcba71a29c76a0bd7a4e11244710920c0749d7c65943a689fbaa91f07271"} Dec 03 18:13:30 crc kubenswrapper[5002]: I1203 18:13:30.638559 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4mlk" event={"ID":"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db","Type":"ContainerStarted","Data":"f7e111926b49381fa2fc78c20bf1dfbb5b52cf0836ea42fac32f8d0c9d766076"} Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.002476 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-v26l4"] Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.005499 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.008212 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.008304 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.023178 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-v26l4"] Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.073683 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-amphora-certs\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.073763 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-config-data\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.073790 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/32afa6b4-a338-4399-8544-eb7ded9089ed-hm-ports\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.073816 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-scripts\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.073835 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32afa6b4-a338-4399-8544-eb7ded9089ed-config-data-merged\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.073907 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-combined-ca-bundle\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.175813 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-combined-ca-bundle\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.175959 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: 
\"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-amphora-certs\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.175997 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-config-data\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.176028 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/32afa6b4-a338-4399-8544-eb7ded9089ed-hm-ports\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.176066 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-scripts\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.176093 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32afa6b4-a338-4399-8544-eb7ded9089ed-config-data-merged\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.176901 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32afa6b4-a338-4399-8544-eb7ded9089ed-config-data-merged\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.177345 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/32afa6b4-a338-4399-8544-eb7ded9089ed-hm-ports\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.182838 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-config-data\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.183231 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-combined-ca-bundle\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.183631 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-amphora-certs\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " 
pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.198361 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afa6b4-a338-4399-8544-eb7ded9089ed-scripts\") pod \"octavia-housekeeping-v26l4\" (UID: \"32afa6b4-a338-4399-8544-eb7ded9089ed\") " pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:31 crc kubenswrapper[5002]: I1203 18:13:31.355479 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:32 crc kubenswrapper[5002]: I1203 18:13:32.661818 5002 generic.go:334] "Generic (PLEG): container finished" podID="015396fc-f13a-4bc0-a8e6-6f2a4d6d97db" containerID="f7e111926b49381fa2fc78c20bf1dfbb5b52cf0836ea42fac32f8d0c9d766076" exitCode=0 Dec 03 18:13:32 crc kubenswrapper[5002]: I1203 18:13:32.661925 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4mlk" event={"ID":"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db","Type":"ContainerDied","Data":"f7e111926b49381fa2fc78c20bf1dfbb5b52cf0836ea42fac32f8d0c9d766076"} Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.768807 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-jpwzl"] Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.770979 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.777660 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.778853 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.785339 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-jpwzl"] Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.953022 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-scripts\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.953205 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9f521da5-5148-443f-94ec-6561e981c3b4-hm-ports\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.953296 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-combined-ca-bundle\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.953327 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-amphora-certs\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:33 crc kubenswrapper[5002]: 
I1203 18:13:33.953357 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f521da5-5148-443f-94ec-6561e981c3b4-config-data-merged\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:33 crc kubenswrapper[5002]: I1203 18:13:33.953413 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-config-data\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.055542 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-config-data\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.055704 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-scripts\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.055906 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9f521da5-5148-443f-94ec-6561e981c3b4-hm-ports\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.057704 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9f521da5-5148-443f-94ec-6561e981c3b4-hm-ports\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.058026 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-combined-ca-bundle\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.058083 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-amphora-certs\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.058136 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f521da5-5148-443f-94ec-6561e981c3b4-config-data-merged\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.058865 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: 
\"kubernetes.io/empty-dir/9f521da5-5148-443f-94ec-6561e981c3b4-config-data-merged\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.064479 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-amphora-certs\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.065261 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-scripts\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.066953 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-config-data\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.071643 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f521da5-5148-443f-94ec-6561e981c3b4-combined-ca-bundle\") pod \"octavia-worker-jpwzl\" (UID: \"9f521da5-5148-443f-94ec-6561e981c3b4\") " pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:34 crc kubenswrapper[5002]: I1203 18:13:34.101852 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:35 crc kubenswrapper[5002]: I1203 18:13:35.706491 5002 generic.go:334] "Generic (PLEG): container finished" podID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerID="58a94420c54bb12a185c806815cadde017e694e62e91ebb1a13ccb85b91acbb4" exitCode=0 Dec 03 18:13:35 crc kubenswrapper[5002]: I1203 18:13:35.706574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-sps4w" event={"ID":"6533ba9f-610a-4fef-8c1b-8a519b0f8957","Type":"ContainerDied","Data":"58a94420c54bb12a185c806815cadde017e694e62e91ebb1a13ccb85b91acbb4"} Dec 03 18:13:35 crc kubenswrapper[5002]: I1203 18:13:35.917915 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-v26l4"] Dec 03 18:13:35 crc kubenswrapper[5002]: I1203 18:13:35.971609 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-jpwzl"] Dec 03 18:13:35 crc kubenswrapper[5002]: W1203 18:13:35.973600 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f521da5_5148_443f_94ec_6561e981c3b4.slice/crio-353cc0a978c7d336cf373a0e5110d0bc4730bf2ee9501c0934eb12cabe397a20 WatchSource:0}: Error finding container 353cc0a978c7d336cf373a0e5110d0bc4730bf2ee9501c0934eb12cabe397a20: Status 404 returned error can't find the container with id 353cc0a978c7d336cf373a0e5110d0bc4730bf2ee9501c0934eb12cabe397a20 Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.730268 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-jpwzl" event={"ID":"9f521da5-5148-443f-94ec-6561e981c3b4","Type":"ContainerStarted","Data":"353cc0a978c7d336cf373a0e5110d0bc4730bf2ee9501c0934eb12cabe397a20"} Dec 03 18:13:36 
crc kubenswrapper[5002]: I1203 18:13:36.740645 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-kn7lw" event={"ID":"c01abf35-dc93-4b56-9da5-ed1f312d9402","Type":"ContainerStarted","Data":"e0744a3eb383b54bcd3dd88ff3c83a30708ae621b01679df1b84e70c9716add0"} Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.741444 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.745101 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4mlk" event={"ID":"015396fc-f13a-4bc0-a8e6-6f2a4d6d97db","Type":"ContainerStarted","Data":"6b76a936cbfd2ff3680dcc6db54bb821fca2aae941e429b5db98f2e07a02cf25"} Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.745344 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.747733 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-v26l4" event={"ID":"32afa6b4-a338-4399-8544-eb7ded9089ed","Type":"ContainerStarted","Data":"fa513a013afc111786b1e123011420b0e5df272d994da0e3c86d5463bc3776b0"} Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.750046 5002 generic.go:334] "Generic (PLEG): container finished" podID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerID="0b7299b36eb6eeee9886e9442ae4bb615ada47ae21f563b2256194b44996126a" exitCode=0 Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.750265 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" event={"ID":"a6a8fc82-5ff1-404b-85bf-4839d87588d1","Type":"ContainerDied","Data":"0b7299b36eb6eeee9886e9442ae4bb615ada47ae21f563b2256194b44996126a"} Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.769278 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-kn7lw" podStartSLOduration=3.264898897 podStartE2EDuration="14.769251807s" podCreationTimestamp="2025-12-03 18:13:22 +0000 UTC" firstStartedPulling="2025-12-03 18:13:23.810583589 +0000 UTC m=+6127.224405477" lastFinishedPulling="2025-12-03 18:13:35.314936499 +0000 UTC m=+6138.728758387" observedRunningTime="2025-12-03 18:13:36.755674842 +0000 UTC m=+6140.169496720" watchObservedRunningTime="2025-12-03 18:13:36.769251807 +0000 UTC m=+6140.183073695" Dec 03 18:13:36 crc kubenswrapper[5002]: I1203 18:13:36.791779 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-b4mlk" podStartSLOduration=8.791758263 podStartE2EDuration="8.791758263s" podCreationTimestamp="2025-12-03 18:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:13:36.774257352 +0000 UTC m=+6140.188079240" watchObservedRunningTime="2025-12-03 18:13:36.791758263 +0000 UTC m=+6140.205580151" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.468548 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.582506 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-combined-ca-bundle\") pod \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.582634 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data\") pod \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.582708 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-scripts\") pod \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.582899 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data-merged\") pod \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\" (UID: \"6533ba9f-610a-4fef-8c1b-8a519b0f8957\") " Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.588529 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-scripts" (OuterVolumeSpecName: "scripts") pod "6533ba9f-610a-4fef-8c1b-8a519b0f8957" (UID: "6533ba9f-610a-4fef-8c1b-8a519b0f8957"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.588880 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data" (OuterVolumeSpecName: "config-data") pod "6533ba9f-610a-4fef-8c1b-8a519b0f8957" (UID: "6533ba9f-610a-4fef-8c1b-8a519b0f8957"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.608609 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "6533ba9f-610a-4fef-8c1b-8a519b0f8957" (UID: "6533ba9f-610a-4fef-8c1b-8a519b0f8957"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.619855 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6533ba9f-610a-4fef-8c1b-8a519b0f8957" (UID: "6533ba9f-610a-4fef-8c1b-8a519b0f8957"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.686333 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.686368 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.686377 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6533ba9f-610a-4fef-8c1b-8a519b0f8957-config-data-merged\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.686389 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6533ba9f-610a-4fef-8c1b-8a519b0f8957-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.789398 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-sps4w" event={"ID":"6533ba9f-610a-4fef-8c1b-8a519b0f8957","Type":"ContainerDied","Data":"8108f391c4530b223783484fe3c7344b9d60ce113abba16cd4e6223c9776fe1e"} Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.789799 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8108f391c4530b223783484fe3c7344b9d60ce113abba16cd4e6223c9776fe1e" Dec 03 18:13:39 crc kubenswrapper[5002]: I1203 18:13:39.789467 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-sps4w" Dec 03 18:13:40 crc kubenswrapper[5002]: I1203 18:13:40.814569 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" event={"ID":"a6a8fc82-5ff1-404b-85bf-4839d87588d1","Type":"ContainerStarted","Data":"cd37b19fd16dc01346c08af0586e14c9a42aeb1a0efc81057f7b4c74858d383e"} Dec 03 18:13:40 crc kubenswrapper[5002]: I1203 18:13:40.854281 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" podStartSLOduration=6.9061047989999995 podStartE2EDuration="17.854256645s" podCreationTimestamp="2025-12-03 18:13:23 +0000 UTC" firstStartedPulling="2025-12-03 18:13:24.546942226 +0000 UTC m=+6127.960764114" lastFinishedPulling="2025-12-03 18:13:35.495094072 +0000 UTC m=+6138.908915960" observedRunningTime="2025-12-03 18:13:40.834619676 +0000 UTC m=+6144.248441564" watchObservedRunningTime="2025-12-03 18:13:40.854256645 +0000 UTC m=+6144.268078533" Dec 03 18:13:41 crc kubenswrapper[5002]: I1203 18:13:41.826593 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-jpwzl" event={"ID":"9f521da5-5148-443f-94ec-6561e981c3b4","Type":"ContainerStarted","Data":"20cf99eabd6cdd1fa357e42d3021ea59686e0ec2252ca145560adbc35345c095"} Dec 03 18:13:41 crc kubenswrapper[5002]: I1203 18:13:41.831637 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-v26l4" event={"ID":"32afa6b4-a338-4399-8544-eb7ded9089ed","Type":"ContainerStarted","Data":"01d4753b894fd4d1ccad0cf287ab7e900f17a1ba57d376a10431dc7e323ff59d"} Dec 03 18:13:41 crc kubenswrapper[5002]: I1203 18:13:41.840644 5002 scope.go:117] "RemoveContainer" 
containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:13:41 crc kubenswrapper[5002]: E1203 18:13:41.840928 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.380142 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-6547c5866b-kf9h4"] Dec 03 18:13:42 crc kubenswrapper[5002]: E1203 18:13:42.380642 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerName="octavia-db-sync" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.380976 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerName="octavia-db-sync" Dec 03 18:13:42 crc kubenswrapper[5002]: E1203 18:13:42.381019 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerName="init" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.381028 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerName="init" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.381257 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" containerName="octavia-db-sync" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.384949 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.388495 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-octavia-internal-svc" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.388503 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-octavia-public-svc" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.393475 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6547c5866b-kf9h4"] Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.438576 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-public-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.438658 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-config-data\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.438901 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-scripts\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.438956 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-combined-ca-bundle\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.439007 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-ovndb-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.439035 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/dfb421b6-29a9-403a-a0b6-830781851826-octavia-run\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.439083 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/dfb421b6-29a9-403a-a0b6-830781851826-config-data-merged\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.439248 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-internal-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541194 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-scripts\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541251 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-combined-ca-bundle\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541281 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-ovndb-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541309 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/dfb421b6-29a9-403a-a0b6-830781851826-octavia-run\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541374 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/dfb421b6-29a9-403a-a0b6-830781851826-config-data-merged\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541448 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-internal-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541499 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-public-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.541533 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-config-data\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.542596 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: 
\"kubernetes.io/empty-dir/dfb421b6-29a9-403a-a0b6-830781851826-octavia-run\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.542837 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/dfb421b6-29a9-403a-a0b6-830781851826-config-data-merged\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.547674 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-scripts\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.547869 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-config-data\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.548385 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-internal-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.548553 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-ovndb-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.549779 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-combined-ca-bundle\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.552335 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfb421b6-29a9-403a-a0b6-830781851826-public-tls-certs\") pod \"octavia-api-6547c5866b-kf9h4\" (UID: \"dfb421b6-29a9-403a-a0b6-830781851826\") " pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.717000 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.880615 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f521da5-5148-443f-94ec-6561e981c3b4" containerID="20cf99eabd6cdd1fa357e42d3021ea59686e0ec2252ca145560adbc35345c095" exitCode=0 Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.881267 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-jpwzl" event={"ID":"9f521da5-5148-443f-94ec-6561e981c3b4","Type":"ContainerDied","Data":"20cf99eabd6cdd1fa357e42d3021ea59686e0ec2252ca145560adbc35345c095"} Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.903149 5002 generic.go:334] "Generic (PLEG): container finished" podID="32afa6b4-a338-4399-8544-eb7ded9089ed" containerID="01d4753b894fd4d1ccad0cf287ab7e900f17a1ba57d376a10431dc7e323ff59d" exitCode=0 Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.903195 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-v26l4" event={"ID":"32afa6b4-a338-4399-8544-eb7ded9089ed","Type":"ContainerDied","Data":"01d4753b894fd4d1ccad0cf287ab7e900f17a1ba57d376a10431dc7e323ff59d"} Dec 03 18:13:42 crc kubenswrapper[5002]: I1203 18:13:42.914100 5002 scope.go:117] "RemoveContainer" containerID="43f285c3ff2359576a27562ea0191bb46301e83ee4e0161a8b0361bf6ce08d0e" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.246689 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6547c5866b-kf9h4"] Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.285933 5002 scope.go:117] "RemoveContainer" containerID="ecb175047a941ab35c7ce90bbf6b3ed3fdf3abc5797bdb0bf20b07396d8b5539" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.319829 5002 scope.go:117] "RemoveContainer" containerID="49eb5dbc26068f2681925e06fe19af42cad6b361f0fe98c50b7094b00c818005" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.357287 5002 scope.go:117] "RemoveContainer" containerID="0070b16044af8613d27073576ef302b5b52d3790e3375233066f86781097eddd" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.397261 5002 scope.go:117] "RemoveContainer" containerID="8836ffd8a814ea1c8bf4794e39cce0159665d51e5cd472f6b24142990cc2139a" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.430247 5002 scope.go:117] "RemoveContainer" containerID="873abe10299607fc9764747fd60143187437ea34d8b5e840fe8c3a414ae0bd62" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.456984 5002 scope.go:117] "RemoveContainer" containerID="4fb56380c30add21cc422377ee92ae493904f70445429924d3e3dcca7d36f98f" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.647224 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-b4mlk" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.926967 5002 generic.go:334] "Generic (PLEG): container finished" podID="dfb421b6-29a9-403a-a0b6-830781851826" containerID="8e77231b2749702ba748254b5968533cdbbd3c0be71ebe5494b0de7682da8dcd" exitCode=0 Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.927056 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6547c5866b-kf9h4" event={"ID":"dfb421b6-29a9-403a-a0b6-830781851826","Type":"ContainerDied","Data":"8e77231b2749702ba748254b5968533cdbbd3c0be71ebe5494b0de7682da8dcd"} Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.927103 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6547c5866b-kf9h4" 
event={"ID":"dfb421b6-29a9-403a-a0b6-830781851826","Type":"ContainerStarted","Data":"8fefb009618b90979720f163afc3f7963e36b45f7a0ef3b8a6067eea5dcd8090"} Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.935576 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-jpwzl" event={"ID":"9f521da5-5148-443f-94ec-6561e981c3b4","Type":"ContainerStarted","Data":"25d1214af620240f7c4f204d98e3b475a1d33f19e4b969158d56f32d40c85117"} Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.935756 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.938326 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-v26l4" event={"ID":"32afa6b4-a338-4399-8544-eb7ded9089ed","Type":"ContainerStarted","Data":"c067a52b1f52ffaf2b68239aa141ce7884642e853278caec1ca9e807c3698cb9"} Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.939448 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.982390 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-v26l4" podStartSLOduration=8.585041805 podStartE2EDuration="13.982372174s" podCreationTimestamp="2025-12-03 18:13:30 +0000 UTC" firstStartedPulling="2025-12-03 18:13:35.939384952 +0000 UTC m=+6139.353206840" lastFinishedPulling="2025-12-03 18:13:41.336715321 +0000 UTC m=+6144.750537209" observedRunningTime="2025-12-03 18:13:43.969169749 +0000 UTC m=+6147.382991657" watchObservedRunningTime="2025-12-03 18:13:43.982372174 +0000 UTC m=+6147.396194062" Dec 03 18:13:43 crc kubenswrapper[5002]: I1203 18:13:43.998498 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-jpwzl" podStartSLOduration=5.640224769 podStartE2EDuration="10.998478767s" podCreationTimestamp="2025-12-03 18:13:33 +0000 UTC" firstStartedPulling="2025-12-03 18:13:35.975854824 +0000 UTC m=+6139.389676712" lastFinishedPulling="2025-12-03 18:13:41.334108822 +0000 UTC m=+6144.747930710" observedRunningTime="2025-12-03 18:13:43.991188171 +0000 UTC m=+6147.405010059" watchObservedRunningTime="2025-12-03 18:13:43.998478767 +0000 UTC m=+6147.412300665" Dec 03 18:13:44 crc kubenswrapper[5002]: I1203 18:13:44.951104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6547c5866b-kf9h4" event={"ID":"dfb421b6-29a9-403a-a0b6-830781851826","Type":"ContainerStarted","Data":"cfb08df2b6214039c9b16da94680914acbbefcf795ad2414d81ec7caad26319d"} Dec 03 18:13:44 crc kubenswrapper[5002]: I1203 18:13:44.951483 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6547c5866b-kf9h4" event={"ID":"dfb421b6-29a9-403a-a0b6-830781851826","Type":"ContainerStarted","Data":"cfeeb7d1babaa88c4409c9eba3943ec58ae68d2e56939c61b11e5efcbbe46b05"} Dec 03 18:13:44 crc kubenswrapper[5002]: I1203 18:13:44.951621 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:44 crc kubenswrapper[5002]: I1203 18:13:44.981422 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-6547c5866b-kf9h4" podStartSLOduration=2.981404367 podStartE2EDuration="2.981404367s" podCreationTimestamp="2025-12-03 18:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:13:44.973835013 +0000 UTC m=+6148.387656921" watchObservedRunningTime="2025-12-03 18:13:44.981404367 +0000 UTC m=+6148.395226255" Dec 03 18:13:45 crc kubenswrapper[5002]: I1203 18:13:45.959919 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:13:49 crc kubenswrapper[5002]: I1203 18:13:49.158314 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-jpwzl" Dec 03 18:13:53 crc kubenswrapper[5002]: I1203 18:13:53.263473 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-kn7lw" Dec 03 18:13:53 crc kubenswrapper[5002]: I1203 18:13:53.840836 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:13:53 crc kubenswrapper[5002]: E1203 18:13:53.841530 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:14:01 crc kubenswrapper[5002]: I1203 18:14:01.386839 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-v26l4" Dec 03 18:14:01 crc kubenswrapper[5002]: I1203 18:14:01.921980 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:14:01 crc kubenswrapper[5002]: I1203 18:14:01.951753 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6547c5866b-kf9h4" Dec 03 18:14:02 crc kubenswrapper[5002]: I1203 18:14:02.028333 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-api-75c7c68f7d-662gl"] Dec 03 18:14:02 crc kubenswrapper[5002]: I1203 18:14:02.028623 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-api-75c7c68f7d-662gl" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api" containerID="cri-o://f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739" gracePeriod=30 Dec 03 18:14:02 crc kubenswrapper[5002]: I1203 18:14:02.028795 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-api-75c7c68f7d-662gl" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api-provider-agent" containerID="cri-o://80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e" gracePeriod=30 Dec 03 18:14:03 crc kubenswrapper[5002]: I1203 18:14:03.173074 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerID="80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e" exitCode=0 Dec 03 18:14:03 crc kubenswrapper[5002]: I1203 18:14:03.173192 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerDied","Data":"80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e"} Dec 03 18:14:04 crc kubenswrapper[5002]: I1203 18:14:04.841776 5002 scope.go:117] "RemoveContainer" 
containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:14:04 crc kubenswrapper[5002]: E1203 18:14:04.842036 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.695064 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-75c7c68f7d-662gl" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.760460 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-octavia-run\") pod \"9f7b87f8-2afa-4444-ba24-34df442cd2db\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.760601 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data-merged\") pod \"9f7b87f8-2afa-4444-ba24-34df442cd2db\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.760767 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-combined-ca-bundle\") pod \"9f7b87f8-2afa-4444-ba24-34df442cd2db\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.760867 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-scripts\") pod \"9f7b87f8-2afa-4444-ba24-34df442cd2db\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.760904 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-ovndb-tls-certs\") pod \"9f7b87f8-2afa-4444-ba24-34df442cd2db\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.760939 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data\") pod \"9f7b87f8-2afa-4444-ba24-34df442cd2db\" (UID: \"9f7b87f8-2afa-4444-ba24-34df442cd2db\") " Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.763446 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-octavia-run" (OuterVolumeSpecName: "octavia-run") pod "9f7b87f8-2afa-4444-ba24-34df442cd2db" (UID: "9f7b87f8-2afa-4444-ba24-34df442cd2db"). InnerVolumeSpecName "octavia-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.773014 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data" (OuterVolumeSpecName: "config-data") pod "9f7b87f8-2afa-4444-ba24-34df442cd2db" (UID: "9f7b87f8-2afa-4444-ba24-34df442cd2db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.778823 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-scripts" (OuterVolumeSpecName: "scripts") pod "9f7b87f8-2afa-4444-ba24-34df442cd2db" (UID: "9f7b87f8-2afa-4444-ba24-34df442cd2db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.820521 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f7b87f8-2afa-4444-ba24-34df442cd2db" (UID: "9f7b87f8-2afa-4444-ba24-34df442cd2db"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.836846 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "9f7b87f8-2afa-4444-ba24-34df442cd2db" (UID: "9f7b87f8-2afa-4444-ba24-34df442cd2db"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.862824 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.862852 5002 reconciler_common.go:293] "Volume detached for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-octavia-run\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.862862 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9f7b87f8-2afa-4444-ba24-34df442cd2db-config-data-merged\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.862870 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.862879 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.923735 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "9f7b87f8-2afa-4444-ba24-34df442cd2db" (UID: "9f7b87f8-2afa-4444-ba24-34df442cd2db"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:14:05 crc kubenswrapper[5002]: I1203 18:14:05.964355 5002 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f7b87f8-2afa-4444-ba24-34df442cd2db-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.208902 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerID="f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739" exitCode=0 Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.208983 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-75c7c68f7d-662gl" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.209027 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerDied","Data":"f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739"} Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.216522 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-75c7c68f7d-662gl" event={"ID":"9f7b87f8-2afa-4444-ba24-34df442cd2db","Type":"ContainerDied","Data":"1d61a233593729f3cbdbf4753b186308ef8617c42a27f9d0af1b3f9d733d8a40"} Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.216621 5002 scope.go:117] "RemoveContainer" containerID="80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.253122 5002 scope.go:117] "RemoveContainer" containerID="f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.253251 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-api-75c7c68f7d-662gl"] Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.261107 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-api-75c7c68f7d-662gl"] Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.283020 5002 scope.go:117] "RemoveContainer" containerID="68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.353300 5002 scope.go:117] "RemoveContainer" containerID="80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e" Dec 03 18:14:06 crc kubenswrapper[5002]: E1203 18:14:06.353894 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e\": container with ID starting with 80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e not found: ID does not exist" containerID="80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.354015 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e"} err="failed to get container status \"80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e\": rpc error: code = NotFound desc = could not find container \"80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e\": container with ID starting with 80b6b0aa7d182fd06f27db61c599f2fd8e3cbc8125e74b33373b297aa255080e not found: ID does not exist" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.354092 
5002 scope.go:117] "RemoveContainer" containerID="f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739" Dec 03 18:14:06 crc kubenswrapper[5002]: E1203 18:14:06.354440 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739\": container with ID starting with f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739 not found: ID does not exist" containerID="f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.354473 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739"} err="failed to get container status \"f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739\": rpc error: code = NotFound desc = could not find container \"f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739\": container with ID starting with f1fd7e6ab77b4ac3fc44a081e129845c7521694683a3c16cb22d7eba9be97739 not found: ID does not exist" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.354514 5002 scope.go:117] "RemoveContainer" containerID="68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4" Dec 03 18:14:06 crc kubenswrapper[5002]: E1203 18:14:06.354731 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4\": container with ID starting with 68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4 not found: ID does not exist" containerID="68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.354847 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4"} err="failed to get container status \"68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4\": rpc error: code = NotFound desc = could not find container \"68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4\": container with ID starting with 68cc6b3f60a338a19fae61c950ee4f551566d723a28fc1ed81d28d1f48c52db4 not found: ID does not exist" Dec 03 18:14:06 crc kubenswrapper[5002]: I1203 18:14:06.851895 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" path="/var/lib/kubelet/pods/9f7b87f8-2afa-4444-ba24-34df442cd2db/volumes" Dec 03 18:14:17 crc kubenswrapper[5002]: I1203 18:14:17.840138 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:14:17 crc kubenswrapper[5002]: E1203 18:14:17.841062 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.394717 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cqgq2"] Dec 03 18:14:21 crc kubenswrapper[5002]: E1203 
18:14:21.396724 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api-provider-agent" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.396783 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api-provider-agent" Dec 03 18:14:21 crc kubenswrapper[5002]: E1203 18:14:21.396816 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="init" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.396830 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="init" Dec 03 18:14:21 crc kubenswrapper[5002]: E1203 18:14:21.396900 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.396915 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.397718 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api-provider-agent" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.397819 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f7b87f8-2afa-4444-ba24-34df442cd2db" containerName="octavia-api" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.409084 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cqgq2"] Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.409255 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.502020 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-utilities\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.502278 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-catalog-content\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.502417 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdqw8\" (UniqueName: \"kubernetes.io/projected/34943d34-e629-425e-8483-ef54a59642f6-kube-api-access-sdqw8\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.604954 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-utilities\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.605091 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-catalog-content\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.605154 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdqw8\" (UniqueName: \"kubernetes.io/projected/34943d34-e629-425e-8483-ef54a59642f6-kube-api-access-sdqw8\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.606077 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-catalog-content\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.606079 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-utilities\") pod \"redhat-operators-cqgq2\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.633896 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdqw8\" (UniqueName: \"kubernetes.io/projected/34943d34-e629-425e-8483-ef54a59642f6-kube-api-access-sdqw8\") pod \"redhat-operators-cqgq2\" (UID: 
\"34943d34-e629-425e-8483-ef54a59642f6\") " pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:21 crc kubenswrapper[5002]: I1203 18:14:21.743154 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:22 crc kubenswrapper[5002]: I1203 18:14:22.199991 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cqgq2"] Dec 03 18:14:22 crc kubenswrapper[5002]: I1203 18:14:22.395236 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqgq2" event={"ID":"34943d34-e629-425e-8483-ef54a59642f6","Type":"ContainerStarted","Data":"8e59db2e572b3c9a13bc5978102edfa679c4db2275cf1b7a45f75c8ed82f9b56"} Dec 03 18:14:23 crc kubenswrapper[5002]: I1203 18:14:23.407793 5002 generic.go:334] "Generic (PLEG): container finished" podID="34943d34-e629-425e-8483-ef54a59642f6" containerID="70dc3e03a6e1b8ee66e389abc1170b6d3748b88450daf1053e4ed83a395c846b" exitCode=0 Dec 03 18:14:23 crc kubenswrapper[5002]: I1203 18:14:23.407857 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqgq2" event={"ID":"34943d34-e629-425e-8483-ef54a59642f6","Type":"ContainerDied","Data":"70dc3e03a6e1b8ee66e389abc1170b6d3748b88450daf1053e4ed83a395c846b"} Dec 03 18:14:25 crc kubenswrapper[5002]: I1203 18:14:25.433139 5002 generic.go:334] "Generic (PLEG): container finished" podID="34943d34-e629-425e-8483-ef54a59642f6" containerID="57f2fcf7271ece5bbabc5389a8583c8680601fc9a8efb6d05cc20375d1807981" exitCode=0 Dec 03 18:14:25 crc kubenswrapper[5002]: I1203 18:14:25.433221 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqgq2" event={"ID":"34943d34-e629-425e-8483-ef54a59642f6","Type":"ContainerDied","Data":"57f2fcf7271ece5bbabc5389a8583c8680601fc9a8efb6d05cc20375d1807981"} Dec 03 18:14:26 crc kubenswrapper[5002]: I1203 18:14:26.446458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqgq2" event={"ID":"34943d34-e629-425e-8483-ef54a59642f6","Type":"ContainerStarted","Data":"ada1401d50faac6adb5c0268ed4dfb304d6fc34e987da82330be969210118ae5"} Dec 03 18:14:26 crc kubenswrapper[5002]: I1203 18:14:26.466919 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cqgq2" podStartSLOduration=2.993126257 podStartE2EDuration="5.4669028s" podCreationTimestamp="2025-12-03 18:14:21 +0000 UTC" firstStartedPulling="2025-12-03 18:14:23.41042795 +0000 UTC m=+6186.824249838" lastFinishedPulling="2025-12-03 18:14:25.884204493 +0000 UTC m=+6189.298026381" observedRunningTime="2025-12-03 18:14:26.463794776 +0000 UTC m=+6189.877616664" watchObservedRunningTime="2025-12-03 18:14:26.4669028 +0000 UTC m=+6189.880724688" Dec 03 18:14:31 crc kubenswrapper[5002]: I1203 18:14:31.744281 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:31 crc kubenswrapper[5002]: I1203 18:14:31.744942 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:31 crc kubenswrapper[5002]: I1203 18:14:31.812123 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:31 crc kubenswrapper[5002]: I1203 18:14:31.840168 5002 scope.go:117] "RemoveContainer" 
containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:14:31 crc kubenswrapper[5002]: E1203 18:14:31.840512 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:14:32 crc kubenswrapper[5002]: I1203 18:14:32.544111 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:32 crc kubenswrapper[5002]: I1203 18:14:32.599029 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cqgq2"] Dec 03 18:14:34 crc kubenswrapper[5002]: I1203 18:14:34.519032 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cqgq2" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="registry-server" containerID="cri-o://ada1401d50faac6adb5c0268ed4dfb304d6fc34e987da82330be969210118ae5" gracePeriod=2 Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.532872 5002 generic.go:334] "Generic (PLEG): container finished" podID="34943d34-e629-425e-8483-ef54a59642f6" containerID="ada1401d50faac6adb5c0268ed4dfb304d6fc34e987da82330be969210118ae5" exitCode=0 Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.532957 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqgq2" event={"ID":"34943d34-e629-425e-8483-ef54a59642f6","Type":"ContainerDied","Data":"ada1401d50faac6adb5c0268ed4dfb304d6fc34e987da82330be969210118ae5"} Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.749205 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.801562 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdqw8\" (UniqueName: \"kubernetes.io/projected/34943d34-e629-425e-8483-ef54a59642f6-kube-api-access-sdqw8\") pod \"34943d34-e629-425e-8483-ef54a59642f6\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.801831 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-catalog-content\") pod \"34943d34-e629-425e-8483-ef54a59642f6\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.807579 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-utilities\") pod \"34943d34-e629-425e-8483-ef54a59642f6\" (UID: \"34943d34-e629-425e-8483-ef54a59642f6\") " Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.808616 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-utilities" (OuterVolumeSpecName: "utilities") pod "34943d34-e629-425e-8483-ef54a59642f6" (UID: "34943d34-e629-425e-8483-ef54a59642f6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.809038 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34943d34-e629-425e-8483-ef54a59642f6-kube-api-access-sdqw8" (OuterVolumeSpecName: "kube-api-access-sdqw8") pod "34943d34-e629-425e-8483-ef54a59642f6" (UID: "34943d34-e629-425e-8483-ef54a59642f6"). InnerVolumeSpecName "kube-api-access-sdqw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.911176 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:35 crc kubenswrapper[5002]: I1203 18:14:35.911205 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdqw8\" (UniqueName: \"kubernetes.io/projected/34943d34-e629-425e-8483-ef54a59642f6-kube-api-access-sdqw8\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.260802 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "34943d34-e629-425e-8483-ef54a59642f6" (UID: "34943d34-e629-425e-8483-ef54a59642f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.321584 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34943d34-e629-425e-8483-ef54a59642f6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.545014 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqgq2" event={"ID":"34943d34-e629-425e-8483-ef54a59642f6","Type":"ContainerDied","Data":"8e59db2e572b3c9a13bc5978102edfa679c4db2275cf1b7a45f75c8ed82f9b56"} Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.545080 5002 scope.go:117] "RemoveContainer" containerID="ada1401d50faac6adb5c0268ed4dfb304d6fc34e987da82330be969210118ae5" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.545090 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cqgq2" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.584347 5002 scope.go:117] "RemoveContainer" containerID="57f2fcf7271ece5bbabc5389a8583c8680601fc9a8efb6d05cc20375d1807981" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.596130 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cqgq2"] Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.609643 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cqgq2"] Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.612781 5002 scope.go:117] "RemoveContainer" containerID="70dc3e03a6e1b8ee66e389abc1170b6d3748b88450daf1053e4ed83a395c846b" Dec 03 18:14:36 crc kubenswrapper[5002]: I1203 18:14:36.851372 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34943d34-e629-425e-8483-ef54a59642f6" path="/var/lib/kubelet/pods/34943d34-e629-425e-8483-ef54a59642f6/volumes" Dec 03 18:14:46 crc kubenswrapper[5002]: I1203 18:14:46.848284 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:14:46 crc kubenswrapper[5002]: E1203 18:14:46.850273 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.153679 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8"] Dec 03 18:15:00 crc kubenswrapper[5002]: E1203 18:15:00.154626 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="extract-content" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.154640 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="extract-content" Dec 03 18:15:00 crc kubenswrapper[5002]: E1203 18:15:00.154666 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="registry-server" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.154671 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="registry-server" Dec 03 18:15:00 crc kubenswrapper[5002]: E1203 18:15:00.154683 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="extract-utilities" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.154689 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="extract-utilities" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.154893 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="34943d34-e629-425e-8483-ef54a59642f6" containerName="registry-server" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.155777 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.161462 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.162055 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.174143 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8"] Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.243553 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/445689e8-47fb-4fae-9070-4831d2c0151a-config-volume\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.243710 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpcxz\" (UniqueName: \"kubernetes.io/projected/445689e8-47fb-4fae-9070-4831d2c0151a-kube-api-access-jpcxz\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.244124 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/445689e8-47fb-4fae-9070-4831d2c0151a-secret-volume\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.345488 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/445689e8-47fb-4fae-9070-4831d2c0151a-secret-volume\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.345806 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/445689e8-47fb-4fae-9070-4831d2c0151a-config-volume\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.346675 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/445689e8-47fb-4fae-9070-4831d2c0151a-config-volume\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.346865 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpcxz\" (UniqueName: \"kubernetes.io/projected/445689e8-47fb-4fae-9070-4831d2c0151a-kube-api-access-jpcxz\") pod 
\"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.353884 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/445689e8-47fb-4fae-9070-4831d2c0151a-secret-volume\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.365256 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpcxz\" (UniqueName: \"kubernetes.io/projected/445689e8-47fb-4fae-9070-4831d2c0151a-kube-api-access-jpcxz\") pod \"collect-profiles-29413095-7kwp8\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:00 crc kubenswrapper[5002]: I1203 18:15:00.502910 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:01 crc kubenswrapper[5002]: I1203 18:15:01.002130 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8"] Dec 03 18:15:01 crc kubenswrapper[5002]: I1203 18:15:01.794359 5002 generic.go:334] "Generic (PLEG): container finished" podID="445689e8-47fb-4fae-9070-4831d2c0151a" containerID="c4fb79e4b30b70bab45b7fcfbeea8550cd41e217f7e61b614a763f84f963abba" exitCode=0 Dec 03 18:15:01 crc kubenswrapper[5002]: I1203 18:15:01.794543 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" event={"ID":"445689e8-47fb-4fae-9070-4831d2c0151a","Type":"ContainerDied","Data":"c4fb79e4b30b70bab45b7fcfbeea8550cd41e217f7e61b614a763f84f963abba"} Dec 03 18:15:01 crc kubenswrapper[5002]: I1203 18:15:01.794680 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" event={"ID":"445689e8-47fb-4fae-9070-4831d2c0151a","Type":"ContainerStarted","Data":"b07328e3ff5535e8f363c06d04e3359b6d698da6b7c4dea79f8db3e067ab0620"} Dec 03 18:15:01 crc kubenswrapper[5002]: I1203 18:15:01.840707 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:15:01 crc kubenswrapper[5002]: E1203 18:15:01.841219 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.270987 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.413571 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/445689e8-47fb-4fae-9070-4831d2c0151a-secret-volume\") pod \"445689e8-47fb-4fae-9070-4831d2c0151a\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.415044 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpcxz\" (UniqueName: \"kubernetes.io/projected/445689e8-47fb-4fae-9070-4831d2c0151a-kube-api-access-jpcxz\") pod \"445689e8-47fb-4fae-9070-4831d2c0151a\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.415570 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/445689e8-47fb-4fae-9070-4831d2c0151a-config-volume\") pod \"445689e8-47fb-4fae-9070-4831d2c0151a\" (UID: \"445689e8-47fb-4fae-9070-4831d2c0151a\") " Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.416489 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/445689e8-47fb-4fae-9070-4831d2c0151a-config-volume" (OuterVolumeSpecName: "config-volume") pod "445689e8-47fb-4fae-9070-4831d2c0151a" (UID: "445689e8-47fb-4fae-9070-4831d2c0151a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.420302 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/445689e8-47fb-4fae-9070-4831d2c0151a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "445689e8-47fb-4fae-9070-4831d2c0151a" (UID: "445689e8-47fb-4fae-9070-4831d2c0151a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.420307 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/445689e8-47fb-4fae-9070-4831d2c0151a-kube-api-access-jpcxz" (OuterVolumeSpecName: "kube-api-access-jpcxz") pod "445689e8-47fb-4fae-9070-4831d2c0151a" (UID: "445689e8-47fb-4fae-9070-4831d2c0151a"). InnerVolumeSpecName "kube-api-access-jpcxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.518242 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/445689e8-47fb-4fae-9070-4831d2c0151a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.518288 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpcxz\" (UniqueName: \"kubernetes.io/projected/445689e8-47fb-4fae-9070-4831d2c0151a-kube-api-access-jpcxz\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.518299 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/445689e8-47fb-4fae-9070-4831d2c0151a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.829976 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" event={"ID":"445689e8-47fb-4fae-9070-4831d2c0151a","Type":"ContainerDied","Data":"b07328e3ff5535e8f363c06d04e3359b6d698da6b7c4dea79f8db3e067ab0620"} Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.830010 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b07328e3ff5535e8f363c06d04e3359b6d698da6b7c4dea79f8db3e067ab0620" Dec 03 18:15:03 crc kubenswrapper[5002]: I1203 18:15:03.830063 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413095-7kwp8" Dec 03 18:15:04 crc kubenswrapper[5002]: I1203 18:15:04.345577 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6"] Dec 03 18:15:04 crc kubenswrapper[5002]: I1203 18:15:04.355442 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-pz7m6"] Dec 03 18:15:04 crc kubenswrapper[5002]: I1203 18:15:04.851669 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5fa8512-9046-4b25-960f-5b9c4774c48b" path="/var/lib/kubelet/pods/c5fa8512-9046-4b25-960f-5b9c4774c48b/volumes" Dec 03 18:15:07 crc kubenswrapper[5002]: I1203 18:15:07.261301 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-lrdhb"] Dec 03 18:15:07 crc kubenswrapper[5002]: I1203 18:15:07.261889 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerName="octavia-amphora-httpd" containerID="cri-o://cd37b19fd16dc01346c08af0586e14c9a42aeb1a0efc81057f7b4c74858d383e" gracePeriod=30 Dec 03 18:15:07 crc kubenswrapper[5002]: I1203 18:15:07.938145 5002 generic.go:334] "Generic (PLEG): container finished" podID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerID="cd37b19fd16dc01346c08af0586e14c9a42aeb1a0efc81057f7b4c74858d383e" exitCode=0 Dec 03 18:15:07 crc kubenswrapper[5002]: I1203 18:15:07.938396 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" event={"ID":"a6a8fc82-5ff1-404b-85bf-4839d87588d1","Type":"ContainerDied","Data":"cd37b19fd16dc01346c08af0586e14c9a42aeb1a0efc81057f7b4c74858d383e"} Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.051016 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.118259 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a6a8fc82-5ff1-404b-85bf-4839d87588d1-amphora-image\") pod \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.118401 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6a8fc82-5ff1-404b-85bf-4839d87588d1-httpd-config\") pod \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\" (UID: \"a6a8fc82-5ff1-404b-85bf-4839d87588d1\") " Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.154415 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6a8fc82-5ff1-404b-85bf-4839d87588d1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a6a8fc82-5ff1-404b-85bf-4839d87588d1" (UID: "a6a8fc82-5ff1-404b-85bf-4839d87588d1"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.220955 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6a8fc82-5ff1-404b-85bf-4839d87588d1-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.229179 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6a8fc82-5ff1-404b-85bf-4839d87588d1-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "a6a8fc82-5ff1-404b-85bf-4839d87588d1" (UID: "a6a8fc82-5ff1-404b-85bf-4839d87588d1"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.323053 5002 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a6a8fc82-5ff1-404b-85bf-4839d87588d1-amphora-image\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.957145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" event={"ID":"a6a8fc82-5ff1-404b-85bf-4839d87588d1","Type":"ContainerDied","Data":"dcfa7dd498c990dbc9b333df4108334e4e3fbc668c65127e3b21beb92b6937ae"} Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.957528 5002 scope.go:117] "RemoveContainer" containerID="cd37b19fd16dc01346c08af0586e14c9a42aeb1a0efc81057f7b4c74858d383e" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.957712 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-56c9f55b99-lrdhb" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.986179 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-lrdhb"] Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.987318 5002 scope.go:117] "RemoveContainer" containerID="0b7299b36eb6eeee9886e9442ae4bb615ada47ae21f563b2256194b44996126a" Dec 03 18:15:08 crc kubenswrapper[5002]: I1203 18:15:08.996108 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-lrdhb"] Dec 03 18:15:10 crc kubenswrapper[5002]: I1203 18:15:10.853115 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" path="/var/lib/kubelet/pods/a6a8fc82-5ff1-404b-85bf-4839d87588d1/volumes" Dec 03 18:15:15 crc kubenswrapper[5002]: I1203 18:15:15.840543 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:15:15 crc kubenswrapper[5002]: E1203 18:15:15.841402 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.331360 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-sz5gd"] Dec 03 18:15:17 crc kubenswrapper[5002]: E1203 18:15:17.332283 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerName="octavia-amphora-httpd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.332296 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerName="octavia-amphora-httpd" Dec 03 18:15:17 crc kubenswrapper[5002]: E1203 18:15:17.332306 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="445689e8-47fb-4fae-9070-4831d2c0151a" containerName="collect-profiles" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.332313 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="445689e8-47fb-4fae-9070-4831d2c0151a" containerName="collect-profiles" Dec 03 18:15:17 crc kubenswrapper[5002]: E1203 18:15:17.332330 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerName="init" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.332336 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerName="init" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.332518 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="445689e8-47fb-4fae-9070-4831d2c0151a" containerName="collect-profiles" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.332541 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6a8fc82-5ff1-404b-85bf-4839d87588d1" containerName="octavia-amphora-httpd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.333607 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.340684 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-sz5gd"] Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.341933 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.390299 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5fe45db2-057b-47c6-8dbc-310ada8df8bd-httpd-config\") pod \"octavia-image-upload-56c9f55b99-sz5gd\" (UID: \"5fe45db2-057b-47c6-8dbc-310ada8df8bd\") " pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.390433 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/5fe45db2-057b-47c6-8dbc-310ada8df8bd-amphora-image\") pod \"octavia-image-upload-56c9f55b99-sz5gd\" (UID: \"5fe45db2-057b-47c6-8dbc-310ada8df8bd\") " pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.492824 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5fe45db2-057b-47c6-8dbc-310ada8df8bd-httpd-config\") pod \"octavia-image-upload-56c9f55b99-sz5gd\" (UID: \"5fe45db2-057b-47c6-8dbc-310ada8df8bd\") " pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.493154 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/5fe45db2-057b-47c6-8dbc-310ada8df8bd-amphora-image\") pod \"octavia-image-upload-56c9f55b99-sz5gd\" (UID: \"5fe45db2-057b-47c6-8dbc-310ada8df8bd\") " pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.494564 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/5fe45db2-057b-47c6-8dbc-310ada8df8bd-amphora-image\") pod \"octavia-image-upload-56c9f55b99-sz5gd\" (UID: \"5fe45db2-057b-47c6-8dbc-310ada8df8bd\") " pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.504399 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5fe45db2-057b-47c6-8dbc-310ada8df8bd-httpd-config\") pod \"octavia-image-upload-56c9f55b99-sz5gd\" (UID: \"5fe45db2-057b-47c6-8dbc-310ada8df8bd\") " pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:17 crc kubenswrapper[5002]: I1203 18:15:17.667042 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" Dec 03 18:15:18 crc kubenswrapper[5002]: I1203 18:15:18.174300 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-56c9f55b99-sz5gd"] Dec 03 18:15:19 crc kubenswrapper[5002]: I1203 18:15:19.041499 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" event={"ID":"5fe45db2-057b-47c6-8dbc-310ada8df8bd","Type":"ContainerStarted","Data":"ff041ebe7367e27ea8ff679651f38c806874e22f908ddf7ce196ab3bdd6d5d3e"} Dec 03 18:15:19 crc kubenswrapper[5002]: I1203 18:15:19.042006 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" event={"ID":"5fe45db2-057b-47c6-8dbc-310ada8df8bd","Type":"ContainerStarted","Data":"a9ff7b2b85c5763c3c1250921c9713d0c62c12e8f1f95c92dfcf6c743e5bda85"} Dec 03 18:15:20 crc kubenswrapper[5002]: I1203 18:15:20.053448 5002 generic.go:334] "Generic (PLEG): container finished" podID="5fe45db2-057b-47c6-8dbc-310ada8df8bd" containerID="ff041ebe7367e27ea8ff679651f38c806874e22f908ddf7ce196ab3bdd6d5d3e" exitCode=0 Dec 03 18:15:20 crc kubenswrapper[5002]: I1203 18:15:20.053494 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" event={"ID":"5fe45db2-057b-47c6-8dbc-310ada8df8bd","Type":"ContainerDied","Data":"ff041ebe7367e27ea8ff679651f38c806874e22f908ddf7ce196ab3bdd6d5d3e"} Dec 03 18:15:21 crc kubenswrapper[5002]: I1203 18:15:21.064839 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" event={"ID":"5fe45db2-057b-47c6-8dbc-310ada8df8bd","Type":"ContainerStarted","Data":"af87a21d7c0c1ad8da531ee5708fe1388291303b5f8463b7423258f8c4138b3a"} Dec 03 18:15:21 crc kubenswrapper[5002]: I1203 18:15:21.084282 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-56c9f55b99-sz5gd" podStartSLOduration=3.604655545 podStartE2EDuration="4.084266026s" podCreationTimestamp="2025-12-03 18:15:17 +0000 UTC" firstStartedPulling="2025-12-03 18:15:18.182802753 +0000 UTC m=+6241.596624651" lastFinishedPulling="2025-12-03 18:15:18.662413244 +0000 UTC m=+6242.076235132" observedRunningTime="2025-12-03 18:15:21.081407079 +0000 UTC m=+6244.495228967" watchObservedRunningTime="2025-12-03 18:15:21.084266026 +0000 UTC m=+6244.498087914" Dec 03 18:15:24 crc kubenswrapper[5002]: I1203 18:15:24.032991 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a1a9-account-create-update-9pdzb"] Dec 03 18:15:24 crc kubenswrapper[5002]: I1203 18:15:24.044686 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-fdnpl"] Dec 03 18:15:24 crc kubenswrapper[5002]: I1203 18:15:24.053624 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-fdnpl"] Dec 03 18:15:24 crc kubenswrapper[5002]: I1203 18:15:24.063129 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-a1a9-account-create-update-9pdzb"] Dec 03 18:15:24 crc kubenswrapper[5002]: I1203 18:15:24.851647 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="673ac43c-540d-4675-bcd2-328da5e9a17e" path="/var/lib/kubelet/pods/673ac43c-540d-4675-bcd2-328da5e9a17e/volumes" Dec 03 18:15:24 crc kubenswrapper[5002]: I1203 18:15:24.853288 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eec77f3a-6569-4f1e-972a-004d6511941c" 
path="/var/lib/kubelet/pods/eec77f3a-6569-4f1e-972a-004d6511941c/volumes" Dec 03 18:15:27 crc kubenswrapper[5002]: I1203 18:15:27.840781 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:15:27 crc kubenswrapper[5002]: E1203 18:15:27.841492 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:15:31 crc kubenswrapper[5002]: I1203 18:15:31.051003 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-pb6mc"] Dec 03 18:15:31 crc kubenswrapper[5002]: I1203 18:15:31.061932 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-pb6mc"] Dec 03 18:15:32 crc kubenswrapper[5002]: I1203 18:15:32.857347 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bf679aa-747a-461b-9ead-521e718c9bdd" path="/var/lib/kubelet/pods/4bf679aa-747a-461b-9ead-521e718c9bdd/volumes" Dec 03 18:15:39 crc kubenswrapper[5002]: I1203 18:15:39.841351 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:15:39 crc kubenswrapper[5002]: E1203 18:15:39.842278 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:15:43 crc kubenswrapper[5002]: I1203 18:15:43.671180 5002 scope.go:117] "RemoveContainer" containerID="3403585b659d9fb9dadb1df3e0ec0a35ee461b510368829c065d843322e8d231" Dec 03 18:15:43 crc kubenswrapper[5002]: I1203 18:15:43.728969 5002 scope.go:117] "RemoveContainer" containerID="50df0b2db5432a8718c0d2498dc56ecfb3f9a08b76ba24c685c2b85b57680ce1" Dec 03 18:15:43 crc kubenswrapper[5002]: I1203 18:15:43.783846 5002 scope.go:117] "RemoveContainer" containerID="8c145f7f66a01193762b70226f1c1a68465edbaf58e20bb2358f8e36e342900e" Dec 03 18:15:43 crc kubenswrapper[5002]: I1203 18:15:43.830044 5002 scope.go:117] "RemoveContainer" containerID="ea28dc6b7dd75477d2ce83d978b4b1152984b363d7f393d7522c487bc0b499b1" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.616912 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6c89677697-dsn4d"] Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.619159 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.621724 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.629099 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.629625 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.629736 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-rs8pg" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.632487 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c89677697-dsn4d"] Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.667108 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.667396 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-log" containerID="cri-o://229c6cf1fce60475a3ae4f0cd39f36ee195ec22cbee67ff139b8578f04b9fe55" gracePeriod=30 Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.667908 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-httpd" containerID="cri-o://66de54d76af31b2913e68e652216415e395a8c22c89e7bb2629c3399cb430057" gracePeriod=30 Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.706185 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/887528d1-faac-421f-915c-3e2ff6f57064-horizon-secret-key\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.706261 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/887528d1-faac-421f-915c-3e2ff6f57064-logs\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.706307 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-scripts\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.706332 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9btxg\" (UniqueName: \"kubernetes.io/projected/887528d1-faac-421f-915c-3e2ff6f57064-kube-api-access-9btxg\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.706355 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-config-data\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.706450 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-678b67796f-5sffs"] Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.708275 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.745599 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-678b67796f-5sffs"] Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.771409 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.773410 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-log" containerID="cri-o://67c245d3b6da80f64e2dc1080b95102c1606568b44fe7704cc95cc88eb268df0" gracePeriod=30 Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.773612 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-httpd" containerID="cri-o://d088a4958ae42d3b7f345a7c1f4652bf3e39a7f295405fb6d700d60379663dba" gracePeriod=30 Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.808412 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-scripts\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.808860 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/887528d1-faac-421f-915c-3e2ff6f57064-logs\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.808989 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ea39ebd5-5040-448e-9e93-1e222a8da1ed-horizon-secret-key\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.809106 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-scripts\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.809208 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9btxg\" (UniqueName: \"kubernetes.io/projected/887528d1-faac-421f-915c-3e2ff6f57064-kube-api-access-9btxg\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 
18:15:49.809310 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-config-data\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.809453 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea39ebd5-5040-448e-9e93-1e222a8da1ed-logs\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.809603 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-config-data\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.810014 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/887528d1-faac-421f-915c-3e2ff6f57064-logs\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.810213 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/887528d1-faac-421f-915c-3e2ff6f57064-horizon-secret-key\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.810265 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqqw4\" (UniqueName: \"kubernetes.io/projected/ea39ebd5-5040-448e-9e93-1e222a8da1ed-kube-api-access-vqqw4\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.810786 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-scripts\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.811829 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-config-data\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.815339 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/887528d1-faac-421f-915c-3e2ff6f57064-horizon-secret-key\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.823725 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9btxg\" (UniqueName: 
\"kubernetes.io/projected/887528d1-faac-421f-915c-3e2ff6f57064-kube-api-access-9btxg\") pod \"horizon-6c89677697-dsn4d\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.912594 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqqw4\" (UniqueName: \"kubernetes.io/projected/ea39ebd5-5040-448e-9e93-1e222a8da1ed-kube-api-access-vqqw4\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.912658 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-scripts\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.912697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ea39ebd5-5040-448e-9e93-1e222a8da1ed-horizon-secret-key\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.912822 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea39ebd5-5040-448e-9e93-1e222a8da1ed-logs\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.912862 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-config-data\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.913691 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea39ebd5-5040-448e-9e93-1e222a8da1ed-logs\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.915260 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-config-data\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.917073 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-scripts\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.918176 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ea39ebd5-5040-448e-9e93-1e222a8da1ed-horizon-secret-key\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" 
Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.932213 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqqw4\" (UniqueName: \"kubernetes.io/projected/ea39ebd5-5040-448e-9e93-1e222a8da1ed-kube-api-access-vqqw4\") pod \"horizon-678b67796f-5sffs\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:49 crc kubenswrapper[5002]: I1203 18:15:49.957639 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.057495 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.376898 5002 generic.go:334] "Generic (PLEG): container finished" podID="d8902c04-7626-46d3-a514-9e3149ba3020" containerID="67c245d3b6da80f64e2dc1080b95102c1606568b44fe7704cc95cc88eb268df0" exitCode=143 Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.376979 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8902c04-7626-46d3-a514-9e3149ba3020","Type":"ContainerDied","Data":"67c245d3b6da80f64e2dc1080b95102c1606568b44fe7704cc95cc88eb268df0"} Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.378961 5002 generic.go:334] "Generic (PLEG): container finished" podID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerID="229c6cf1fce60475a3ae4f0cd39f36ee195ec22cbee67ff139b8578f04b9fe55" exitCode=143 Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.378992 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0e72f45c-d84e-49b6-bd09-faf99c184f8e","Type":"ContainerDied","Data":"229c6cf1fce60475a3ae4f0cd39f36ee195ec22cbee67ff139b8578f04b9fe55"} Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.428102 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c89677697-dsn4d"] Dec 03 18:15:50 crc kubenswrapper[5002]: W1203 18:15:50.430520 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887528d1_faac_421f_915c_3e2ff6f57064.slice/crio-e7212811eed2c9849fd07c7b6779d7be4de42b51cf696e23e6c9b2fb1ee83051 WatchSource:0}: Error finding container e7212811eed2c9849fd07c7b6779d7be4de42b51cf696e23e6c9b2fb1ee83051: Status 404 returned error can't find the container with id e7212811eed2c9849fd07c7b6779d7be4de42b51cf696e23e6c9b2fb1ee83051 Dec 03 18:15:50 crc kubenswrapper[5002]: I1203 18:15:50.572150 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-678b67796f-5sffs"] Dec 03 18:15:50 crc kubenswrapper[5002]: W1203 18:15:50.576361 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea39ebd5_5040_448e_9e93_1e222a8da1ed.slice/crio-7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad WatchSource:0}: Error finding container 7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad: Status 404 returned error can't find the container with id 7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.326553 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-678b67796f-5sffs"] Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.379642 5002 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/horizon-5f795f5b86-4bds9"] Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.382383 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.391601 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f795f5b86-4bds9"] Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.395685 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.412317 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-678b67796f-5sffs" event={"ID":"ea39ebd5-5040-448e-9e93-1e222a8da1ed","Type":"ContainerStarted","Data":"7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad"} Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.413443 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c89677697-dsn4d" event={"ID":"887528d1-faac-421f-915c-3e2ff6f57064","Type":"ContainerStarted","Data":"e7212811eed2c9849fd07c7b6779d7be4de42b51cf696e23e6c9b2fb1ee83051"} Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.444843 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-combined-ca-bundle\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.444968 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-config-data\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.445015 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-logs\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.445036 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-tls-certs\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.445071 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7fmd\" (UniqueName: \"kubernetes.io/projected/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-kube-api-access-c7fmd\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.445108 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-secret-key\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " 
pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.445138 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-scripts\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.500147 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c89677697-dsn4d"] Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.518588 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-54665f9df8-dr6n7"] Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.520699 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.529511 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-54665f9df8-dr6n7"] Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548380 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-config-data\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548506 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-tls-certs\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548539 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-logs\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548629 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-tls-certs\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548705 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7fmd\" (UniqueName: \"kubernetes.io/projected/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-kube-api-access-c7fmd\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548732 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-secret-key\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548795 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-scripts\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548867 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv2gl\" (UniqueName: \"kubernetes.io/projected/6eb582c6-bdc8-4289-8f44-ee5d3699053d-kube-api-access-rv2gl\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548938 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-config-data\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548968 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-scripts\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.548990 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-combined-ca-bundle\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.549038 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-secret-key\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.549070 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eb582c6-bdc8-4289-8f44-ee5d3699053d-logs\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.549124 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-logs\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.549147 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-combined-ca-bundle\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.551466 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-config-data\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.554859 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-scripts\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.569583 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-secret-key\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.569683 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-tls-certs\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.569690 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-combined-ca-bundle\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.576944 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7fmd\" (UniqueName: \"kubernetes.io/projected/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-kube-api-access-c7fmd\") pod \"horizon-5f795f5b86-4bds9\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651199 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-tls-certs\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651304 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv2gl\" (UniqueName: \"kubernetes.io/projected/6eb582c6-bdc8-4289-8f44-ee5d3699053d-kube-api-access-rv2gl\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651435 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-config-data\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651489 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-scripts\") pod \"horizon-54665f9df8-dr6n7\" (UID: 
\"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651527 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-combined-ca-bundle\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651561 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-secret-key\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.651614 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eb582c6-bdc8-4289-8f44-ee5d3699053d-logs\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.652183 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eb582c6-bdc8-4289-8f44-ee5d3699053d-logs\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.653786 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-scripts\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.655205 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-config-data\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.655718 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-tls-certs\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.659129 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-combined-ca-bundle\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.664191 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-secret-key\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.672097 5002 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-rv2gl\" (UniqueName: \"kubernetes.io/projected/6eb582c6-bdc8-4289-8f44-ee5d3699053d-kube-api-access-rv2gl\") pod \"horizon-54665f9df8-dr6n7\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.714996 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:15:51 crc kubenswrapper[5002]: I1203 18:15:51.853036 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:15:52 crc kubenswrapper[5002]: W1203 18:15:52.187063 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4a741bc_14b4_4d21_b6b0_5cc5a8998eb6.slice/crio-8c8a853e9ca31a7b85d036f1178bf4d5f35939f1cb9b38e747eb244509cc80fa WatchSource:0}: Error finding container 8c8a853e9ca31a7b85d036f1178bf4d5f35939f1cb9b38e747eb244509cc80fa: Status 404 returned error can't find the container with id 8c8a853e9ca31a7b85d036f1178bf4d5f35939f1cb9b38e747eb244509cc80fa Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.192215 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f795f5b86-4bds9"] Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.345657 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-54665f9df8-dr6n7"] Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.422962 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54665f9df8-dr6n7" event={"ID":"6eb582c6-bdc8-4289-8f44-ee5d3699053d","Type":"ContainerStarted","Data":"a98f89ff6e566ae7d24762a4793c021e899b79256575f61feda0c4eb897567f2"} Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.424707 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f795f5b86-4bds9" event={"ID":"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6","Type":"ContainerStarted","Data":"8c8a853e9ca31a7b85d036f1178bf4d5f35939f1cb9b38e747eb244509cc80fa"} Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.841473 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:15:52 crc kubenswrapper[5002]: E1203 18:15:52.843292 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.936291 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.1.59:9292/healthcheck\": read tcp 10.217.0.2:45724->10.217.1.59:9292: read: connection reset by peer" Dec 03 18:15:52 crc kubenswrapper[5002]: I1203 18:15:52.936318 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.1.59:9292/healthcheck\": read tcp 10.217.0.2:45740->10.217.1.59:9292: read: connection reset 
by peer" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.436449 5002 generic.go:334] "Generic (PLEG): container finished" podID="d8902c04-7626-46d3-a514-9e3149ba3020" containerID="d088a4958ae42d3b7f345a7c1f4652bf3e39a7f295405fb6d700d60379663dba" exitCode=0 Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.436544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8902c04-7626-46d3-a514-9e3149ba3020","Type":"ContainerDied","Data":"d088a4958ae42d3b7f345a7c1f4652bf3e39a7f295405fb6d700d60379663dba"} Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.436822 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d8902c04-7626-46d3-a514-9e3149ba3020","Type":"ContainerDied","Data":"98dc8a2fdb4e69c9ee9dbbf7219be134cca7ba34346cf79ad3b88368966a2bcb"} Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.436839 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98dc8a2fdb4e69c9ee9dbbf7219be134cca7ba34346cf79ad3b88368966a2bcb" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.439072 5002 generic.go:334] "Generic (PLEG): container finished" podID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerID="66de54d76af31b2913e68e652216415e395a8c22c89e7bb2629c3399cb430057" exitCode=0 Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.439104 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0e72f45c-d84e-49b6-bd09-faf99c184f8e","Type":"ContainerDied","Data":"66de54d76af31b2913e68e652216415e395a8c22c89e7bb2629c3399cb430057"} Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.487653 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617279 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8w6q\" (UniqueName: \"kubernetes.io/projected/d8902c04-7626-46d3-a514-9e3149ba3020-kube-api-access-r8w6q\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617365 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-config-data\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617430 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-combined-ca-bundle\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617464 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-scripts\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617533 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-internal-tls-certs\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617582 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-httpd-run\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.617636 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-logs\") pod \"d8902c04-7626-46d3-a514-9e3149ba3020\" (UID: \"d8902c04-7626-46d3-a514-9e3149ba3020\") " Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.618812 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-logs" (OuterVolumeSpecName: "logs") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.618986 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.623263 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-scripts" (OuterVolumeSpecName: "scripts") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.625161 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8902c04-7626-46d3-a514-9e3149ba3020-kube-api-access-r8w6q" (OuterVolumeSpecName: "kube-api-access-r8w6q") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "kube-api-access-r8w6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.668491 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.695914 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.702710 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-config-data" (OuterVolumeSpecName: "config-data") pod "d8902c04-7626-46d3-a514-9e3149ba3020" (UID: "d8902c04-7626-46d3-a514-9e3149ba3020"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720578 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8w6q\" (UniqueName: \"kubernetes.io/projected/d8902c04-7626-46d3-a514-9e3149ba3020-kube-api-access-r8w6q\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720604 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720614 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720622 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720632 5002 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8902c04-7626-46d3-a514-9e3149ba3020-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720640 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:53 crc kubenswrapper[5002]: I1203 18:15:53.720650 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8902c04-7626-46d3-a514-9e3149ba3020-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.446933 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.489427 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.504662 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.517198 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:15:54 crc kubenswrapper[5002]: E1203 18:15:54.518830 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-httpd" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.518860 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-httpd" Dec 03 18:15:54 crc kubenswrapper[5002]: E1203 18:15:54.518878 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-log" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.518887 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-log" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.519104 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-httpd" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.519134 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" containerName="glance-log" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.520594 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.522491 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.524485 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.542933 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.684077 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-scripts\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.684514 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.684617 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl9ck\" (UniqueName: \"kubernetes.io/projected/50e35719-3587-4625-9499-fbe4c047c6df-kube-api-access-dl9ck\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.684809 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-config-data\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.684874 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.685021 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e35719-3587-4625-9499-fbe4c047c6df-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.685174 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e35719-3587-4625-9499-fbe4c047c6df-logs\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.788804 5002 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e35719-3587-4625-9499-fbe4c047c6df-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789168 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e35719-3587-4625-9499-fbe4c047c6df-logs\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789277 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-scripts\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789320 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789458 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl9ck\" (UniqueName: \"kubernetes.io/projected/50e35719-3587-4625-9499-fbe4c047c6df-kube-api-access-dl9ck\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789698 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-config-data\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789742 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.789862 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/50e35719-3587-4625-9499-fbe4c047c6df-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.790219 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50e35719-3587-4625-9499-fbe4c047c6df-logs\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.798896 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-config-data\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.800453 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-scripts\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.802094 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.816512 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e35719-3587-4625-9499-fbe4c047c6df-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.820444 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl9ck\" (UniqueName: \"kubernetes.io/projected/50e35719-3587-4625-9499-fbe4c047c6df-kube-api-access-dl9ck\") pod \"glance-default-internal-api-0\" (UID: \"50e35719-3587-4625-9499-fbe4c047c6df\") " pod="openstack/glance-default-internal-api-0" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.862122 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8902c04-7626-46d3-a514-9e3149ba3020" path="/var/lib/kubelet/pods/d8902c04-7626-46d3-a514-9e3149ba3020/volumes" Dec 03 18:15:54 crc kubenswrapper[5002]: I1203 18:15:54.879116 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.041692 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-lskdt"] Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.058567 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-818a-account-create-update-gdv5h"] Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.068678 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-lskdt"] Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.077132 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-818a-account-create-update-gdv5h"] Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.511428 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0e72f45c-d84e-49b6-bd09-faf99c184f8e","Type":"ContainerDied","Data":"3ffd1c2803810d3ef6b845fcbe3adb490ac193bed0421173394f891b3e29f05b"} Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.511707 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ffd1c2803810d3ef6b845fcbe3adb490ac193bed0421173394f891b3e29f05b" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.558030 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712198 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-combined-ca-bundle\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712275 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-public-tls-certs\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712343 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-logs\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712379 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-scripts\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712474 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-config-data\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712517 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj6qt\" (UniqueName: \"kubernetes.io/projected/0e72f45c-d84e-49b6-bd09-faf99c184f8e-kube-api-access-fj6qt\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.712534 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-httpd-run\") pod \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\" (UID: \"0e72f45c-d84e-49b6-bd09-faf99c184f8e\") " Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.714377 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.714557 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-logs" (OuterVolumeSpecName: "logs") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.719305 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-scripts" (OuterVolumeSpecName: "scripts") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.719948 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e72f45c-d84e-49b6-bd09-faf99c184f8e-kube-api-access-fj6qt" (OuterVolumeSpecName: "kube-api-access-fj6qt") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "kube-api-access-fj6qt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.750761 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.773652 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-config-data" (OuterVolumeSpecName: "config-data") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.774477 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0e72f45c-d84e-49b6-bd09-faf99c184f8e" (UID: "0e72f45c-d84e-49b6-bd09-faf99c184f8e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816090 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816129 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj6qt\" (UniqueName: \"kubernetes.io/projected/0e72f45c-d84e-49b6-bd09-faf99c184f8e-kube-api-access-fj6qt\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816141 5002 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816149 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816159 5002 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816168 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e72f45c-d84e-49b6-bd09-faf99c184f8e-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.816175 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e72f45c-d84e-49b6-bd09-faf99c184f8e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.853460 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ef4446f-672a-465a-a1a6-e51cc9f5e5f3" path="/var/lib/kubelet/pods/6ef4446f-672a-465a-a1a6-e51cc9f5e5f3/volumes" Dec 03 18:15:58 crc kubenswrapper[5002]: I1203 18:15:58.855606 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec3180de-e934-4cc6-8657-b30a589eebd4" path="/var/lib/kubelet/pods/ec3180de-e934-4cc6-8657-b30a589eebd4/volumes" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.138116 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 18:15:59 crc kubenswrapper[5002]: W1203 18:15:59.159222 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50e35719_3587_4625_9499_fbe4c047c6df.slice/crio-18a401cc7357066f3d593ac940869e1c8551bd99f3dd3984c7d632474d08e0af WatchSource:0}: Error finding container 18a401cc7357066f3d593ac940869e1c8551bd99f3dd3984c7d632474d08e0af: Status 404 returned error can't find the container with id 18a401cc7357066f3d593ac940869e1c8551bd99f3dd3984c7d632474d08e0af Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.523225 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f795f5b86-4bds9" event={"ID":"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6","Type":"ContainerStarted","Data":"0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.523543 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/horizon-5f795f5b86-4bds9" event={"ID":"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6","Type":"ContainerStarted","Data":"46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.525218 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c89677697-dsn4d" event={"ID":"887528d1-faac-421f-915c-3e2ff6f57064","Type":"ContainerStarted","Data":"db1d6950ec3b647b971fb86c7a6e2ae8c08ac4002cd9f475ab81b24998d5e54c"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.525251 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c89677697-dsn4d" event={"ID":"887528d1-faac-421f-915c-3e2ff6f57064","Type":"ContainerStarted","Data":"5e907135a21fcd8afbe6329339c37579f5aa2a9fb19f38dd555dc5023a652d64"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.525334 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6c89677697-dsn4d" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon-log" containerID="cri-o://5e907135a21fcd8afbe6329339c37579f5aa2a9fb19f38dd555dc5023a652d64" gracePeriod=30 Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.525372 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6c89677697-dsn4d" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon" containerID="cri-o://db1d6950ec3b647b971fb86c7a6e2ae8c08ac4002cd9f475ab81b24998d5e54c" gracePeriod=30 Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.527696 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54665f9df8-dr6n7" event={"ID":"6eb582c6-bdc8-4289-8f44-ee5d3699053d","Type":"ContainerStarted","Data":"bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.527729 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54665f9df8-dr6n7" event={"ID":"6eb582c6-bdc8-4289-8f44-ee5d3699053d","Type":"ContainerStarted","Data":"8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.533339 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"50e35719-3587-4625-9499-fbe4c047c6df","Type":"ContainerStarted","Data":"18a401cc7357066f3d593ac940869e1c8551bd99f3dd3984c7d632474d08e0af"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.537790 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.537846 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-678b67796f-5sffs" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon" containerID="cri-o://7e953a1385980131b4073c1602ef50b1f91ab923775b6ff7e191292e7a8f9359" gracePeriod=30 Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.537862 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-678b67796f-5sffs" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon-log" containerID="cri-o://d6b046fbf09afdc445bf8d517de92625c290f5cbf9e8952d5e7beaa3f1b6a3c9" gracePeriod=30 Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.537795 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-678b67796f-5sffs" event={"ID":"ea39ebd5-5040-448e-9e93-1e222a8da1ed","Type":"ContainerStarted","Data":"7e953a1385980131b4073c1602ef50b1f91ab923775b6ff7e191292e7a8f9359"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.538011 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-678b67796f-5sffs" event={"ID":"ea39ebd5-5040-448e-9e93-1e222a8da1ed","Type":"ContainerStarted","Data":"d6b046fbf09afdc445bf8d517de92625c290f5cbf9e8952d5e7beaa3f1b6a3c9"} Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.555368 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5f795f5b86-4bds9" podStartSLOduration=2.200338523 podStartE2EDuration="8.555350463s" podCreationTimestamp="2025-12-03 18:15:51 +0000 UTC" firstStartedPulling="2025-12-03 18:15:52.211907845 +0000 UTC m=+6275.625729733" lastFinishedPulling="2025-12-03 18:15:58.566919785 +0000 UTC m=+6281.980741673" observedRunningTime="2025-12-03 18:15:59.54670837 +0000 UTC m=+6282.960530258" watchObservedRunningTime="2025-12-03 18:15:59.555350463 +0000 UTC m=+6282.969172351" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.576927 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-678b67796f-5sffs" podStartSLOduration=2.630963654 podStartE2EDuration="10.576908163s" podCreationTimestamp="2025-12-03 18:15:49 +0000 UTC" firstStartedPulling="2025-12-03 18:15:50.578543263 +0000 UTC m=+6273.992365151" lastFinishedPulling="2025-12-03 18:15:58.524487772 +0000 UTC m=+6281.938309660" observedRunningTime="2025-12-03 18:15:59.565865305 +0000 UTC m=+6282.979687193" watchObservedRunningTime="2025-12-03 18:15:59.576908163 +0000 UTC m=+6282.990730051" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.595337 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-54665f9df8-dr6n7" podStartSLOduration=2.349715437 podStartE2EDuration="8.595318529s" podCreationTimestamp="2025-12-03 18:15:51 +0000 UTC" firstStartedPulling="2025-12-03 18:15:52.360994952 +0000 UTC m=+6275.774816840" lastFinishedPulling="2025-12-03 18:15:58.606598044 +0000 UTC m=+6282.020419932" observedRunningTime="2025-12-03 18:15:59.586139202 +0000 UTC m=+6282.999961120" watchObservedRunningTime="2025-12-03 18:15:59.595318529 +0000 UTC m=+6283.009140417" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.621512 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6c89677697-dsn4d" podStartSLOduration=2.418379938 podStartE2EDuration="10.621490164s" podCreationTimestamp="2025-12-03 18:15:49 
+0000 UTC" firstStartedPulling="2025-12-03 18:15:50.432693424 +0000 UTC m=+6273.846515312" lastFinishedPulling="2025-12-03 18:15:58.63580364 +0000 UTC m=+6282.049625538" observedRunningTime="2025-12-03 18:15:59.61315724 +0000 UTC m=+6283.026979128" watchObservedRunningTime="2025-12-03 18:15:59.621490164 +0000 UTC m=+6283.035312052" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.640490 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.654563 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.665776 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:15:59 crc kubenswrapper[5002]: E1203 18:15:59.666309 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-httpd" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.666329 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-httpd" Dec 03 18:15:59 crc kubenswrapper[5002]: E1203 18:15:59.666345 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-log" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.666352 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-log" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.666533 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-log" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.666565 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" containerName="glance-httpd" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.667676 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.670328 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.671053 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.681612 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.741559 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e10e2591-b00b-450f-a558-bc66ee50c347-logs\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.741775 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-config-data\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.741896 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqt6t\" (UniqueName: \"kubernetes.io/projected/e10e2591-b00b-450f-a558-bc66ee50c347-kube-api-access-zqt6t\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.741987 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e10e2591-b00b-450f-a558-bc66ee50c347-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.742418 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.742535 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.742598 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-scripts\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849465 5002 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849552 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849601 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-scripts\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849696 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e10e2591-b00b-450f-a558-bc66ee50c347-logs\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849742 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-config-data\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849803 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqt6t\" (UniqueName: \"kubernetes.io/projected/e10e2591-b00b-450f-a558-bc66ee50c347-kube-api-access-zqt6t\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.849825 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e10e2591-b00b-450f-a558-bc66ee50c347-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.850340 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e10e2591-b00b-450f-a558-bc66ee50c347-logs\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.850818 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e10e2591-b00b-450f-a558-bc66ee50c347-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.862649 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.863936 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.866815 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-config-data\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.867858 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e10e2591-b00b-450f-a558-bc66ee50c347-scripts\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.871459 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqt6t\" (UniqueName: \"kubernetes.io/projected/e10e2591-b00b-450f-a558-bc66ee50c347-kube-api-access-zqt6t\") pod \"glance-default-external-api-0\" (UID: \"e10e2591-b00b-450f-a558-bc66ee50c347\") " pod="openstack/glance-default-external-api-0" Dec 03 18:15:59 crc kubenswrapper[5002]: I1203 18:15:59.957868 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:16:00 crc kubenswrapper[5002]: I1203 18:16:00.008030 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 18:16:00 crc kubenswrapper[5002]: I1203 18:16:00.058497 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:16:00 crc kubenswrapper[5002]: I1203 18:16:00.551665 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"50e35719-3587-4625-9499-fbe4c047c6df","Type":"ContainerStarted","Data":"3b53e46f484b23b54f79fdc4c37ba81c5b200b1202ca051918f0f5f6635fffab"} Dec 03 18:16:00 crc kubenswrapper[5002]: I1203 18:16:00.856896 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e72f45c-d84e-49b6-bd09-faf99c184f8e" path="/var/lib/kubelet/pods/0e72f45c-d84e-49b6-bd09-faf99c184f8e/volumes" Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.035105 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.570092 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e10e2591-b00b-450f-a558-bc66ee50c347","Type":"ContainerStarted","Data":"1401c045fc2dccf958b4e3b228d47bcd55957c1074602d38784dd5f9bbd448c6"} Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.573168 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"50e35719-3587-4625-9499-fbe4c047c6df","Type":"ContainerStarted","Data":"95a4a81aa91412ab7799886811474f4ae8e1dc29ef3d39ceff1e8707c33fe9b0"} Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.614284 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.614256838 podStartE2EDuration="7.614256838s" podCreationTimestamp="2025-12-03 18:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:16:01.602201644 +0000 UTC m=+6285.016023532" watchObservedRunningTime="2025-12-03 18:16:01.614256838 +0000 UTC m=+6285.028078726" Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.715846 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.717022 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.853022 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:16:01 crc kubenswrapper[5002]: I1203 18:16:01.854367 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:16:02 crc kubenswrapper[5002]: I1203 18:16:02.584529 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e10e2591-b00b-450f-a558-bc66ee50c347","Type":"ContainerStarted","Data":"d98374d7b659e70e4873c649119f16d87ba2ff23b8a76810b49af4bd094faeeb"} Dec 03 18:16:02 crc kubenswrapper[5002]: I1203 18:16:02.584914 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e10e2591-b00b-450f-a558-bc66ee50c347","Type":"ContainerStarted","Data":"5f4cc8bf99d61f5caf989c5268f3bd31c892913e05ff5d39a72497fe53da2521"} Dec 03 18:16:02 crc 
Dec 03 18:16:04 crc kubenswrapper[5002]: I1203 18:16:04.880093 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:04 crc kubenswrapper[5002]: I1203 18:16:04.880785 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:04 crc kubenswrapper[5002]: I1203 18:16:04.919212 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:04 crc kubenswrapper[5002]: I1203 18:16:04.921596 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:05 crc kubenswrapper[5002]: I1203 18:16:05.615158 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:05 crc kubenswrapper[5002]: I1203 18:16:05.615626 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:05 crc kubenswrapper[5002]: I1203 18:16:05.840356 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"
Dec 03 18:16:05 crc kubenswrapper[5002]: E1203 18:16:05.840617 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:16:07 crc kubenswrapper[5002]: I1203 18:16:07.032810 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-gnrcz"]
Dec 03 18:16:07 crc kubenswrapper[5002]: I1203 18:16:07.045835 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-gnrcz"]
Dec 03 18:16:07 crc kubenswrapper[5002]: I1203 18:16:07.639942 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 18:16:07 crc kubenswrapper[5002]: I1203 18:16:07.639975 5002 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 18:16:08 crc kubenswrapper[5002]: I1203 18:16:08.548722 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:08 crc kubenswrapper[5002]: I1203 18:16:08.553411 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 03 18:16:08 crc kubenswrapper[5002]: I1203 18:16:08.854053 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13bf5cea-5dcd-4a75-a88b-30215345f16f" path="/var/lib/kubelet/pods/13bf5cea-5dcd-4a75-a88b-30215345f16f/volumes"
Dec 03 18:16:10 crc kubenswrapper[5002]: I1203 18:16:10.008724 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:10 crc kubenswrapper[5002]: I1203 18:16:10.009157 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:10 crc kubenswrapper[5002]: I1203 18:16:10.051336 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:10 crc kubenswrapper[5002]: I1203 18:16:10.075341 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:10 crc kubenswrapper[5002]: I1203 18:16:10.704347 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:10 crc kubenswrapper[5002]: I1203 18:16:10.704639 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:11 crc kubenswrapper[5002]: I1203 18:16:11.718770 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.113:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8443: connect: connection refused"
Dec 03 18:16:11 crc kubenswrapper[5002]: I1203 18:16:11.854708 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-54665f9df8-dr6n7" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.114:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8443: connect: connection refused"
Dec 03 18:16:12 crc kubenswrapper[5002]: I1203 18:16:12.586620 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:12 crc kubenswrapper[5002]: I1203 18:16:12.610741 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 03 18:16:19 crc kubenswrapper[5002]: I1203 18:16:19.840870 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5"
Dec 03 18:16:19 crc kubenswrapper[5002]: E1203 18:16:19.841848 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:16:23 crc kubenswrapper[5002]: I1203 18:16:23.712262 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-54665f9df8-dr6n7"
Dec 03 18:16:23 crc kubenswrapper[5002]: I1203 18:16:23.805432 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5f795f5b86-4bds9"
Dec 03 18:16:25 crc kubenswrapper[5002]: I1203 18:16:25.375260 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-54665f9df8-dr6n7"
Dec 03 18:16:25 crc kubenswrapper[5002]: I1203 18:16:25.468354 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5f795f5b86-4bds9"]
pods=["openstack/horizon-5f795f5b86-4bds9"] Dec 03 18:16:25 crc kubenswrapper[5002]: I1203 18:16:25.468780 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon-log" containerID="cri-o://46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43" gracePeriod=30 Dec 03 18:16:25 crc kubenswrapper[5002]: I1203 18:16:25.468885 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" containerID="cri-o://0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc" gracePeriod=30 Dec 03 18:16:25 crc kubenswrapper[5002]: I1203 18:16:25.483517 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.113:8443/dashboard/auth/login/?next=/dashboard/\": EOF" Dec 03 18:16:28 crc kubenswrapper[5002]: I1203 18:16:28.883637 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.113:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:45382->10.217.1.113:8443: read: connection reset by peer" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.932239 5002 generic.go:334] "Generic (PLEG): container finished" podID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerID="7e953a1385980131b4073c1602ef50b1f91ab923775b6ff7e191292e7a8f9359" exitCode=137 Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.932711 5002 generic.go:334] "Generic (PLEG): container finished" podID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerID="d6b046fbf09afdc445bf8d517de92625c290f5cbf9e8952d5e7beaa3f1b6a3c9" exitCode=137 Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.932287 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-678b67796f-5sffs" event={"ID":"ea39ebd5-5040-448e-9e93-1e222a8da1ed","Type":"ContainerDied","Data":"7e953a1385980131b4073c1602ef50b1f91ab923775b6ff7e191292e7a8f9359"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.932892 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-678b67796f-5sffs" event={"ID":"ea39ebd5-5040-448e-9e93-1e222a8da1ed","Type":"ContainerDied","Data":"d6b046fbf09afdc445bf8d517de92625c290f5cbf9e8952d5e7beaa3f1b6a3c9"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.932924 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-678b67796f-5sffs" event={"ID":"ea39ebd5-5040-448e-9e93-1e222a8da1ed","Type":"ContainerDied","Data":"7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.932949 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.934656 5002 generic.go:334] "Generic (PLEG): container finished" podID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerID="0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc" exitCode=0 Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.934685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/horizon-5f795f5b86-4bds9" event={"ID":"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6","Type":"ContainerDied","Data":"0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.937017 5002 generic.go:334] "Generic (PLEG): container finished" podID="887528d1-faac-421f-915c-3e2ff6f57064" containerID="db1d6950ec3b647b971fb86c7a6e2ae8c08ac4002cd9f475ab81b24998d5e54c" exitCode=137 Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.937029 5002 generic.go:334] "Generic (PLEG): container finished" podID="887528d1-faac-421f-915c-3e2ff6f57064" containerID="5e907135a21fcd8afbe6329339c37579f5aa2a9fb19f38dd555dc5023a652d64" exitCode=137 Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.937041 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c89677697-dsn4d" event={"ID":"887528d1-faac-421f-915c-3e2ff6f57064","Type":"ContainerDied","Data":"db1d6950ec3b647b971fb86c7a6e2ae8c08ac4002cd9f475ab81b24998d5e54c"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.937057 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c89677697-dsn4d" event={"ID":"887528d1-faac-421f-915c-3e2ff6f57064","Type":"ContainerDied","Data":"5e907135a21fcd8afbe6329339c37579f5aa2a9fb19f38dd555dc5023a652d64"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:29.947476 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.060052 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-scripts\") pod \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.060271 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ea39ebd5-5040-448e-9e93-1e222a8da1ed-horizon-secret-key\") pod \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.060299 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea39ebd5-5040-448e-9e93-1e222a8da1ed-logs\") pod \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.060358 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqqw4\" (UniqueName: \"kubernetes.io/projected/ea39ebd5-5040-448e-9e93-1e222a8da1ed-kube-api-access-vqqw4\") pod \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.060385 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-config-data\") pod \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\" (UID: \"ea39ebd5-5040-448e-9e93-1e222a8da1ed\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.060927 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea39ebd5-5040-448e-9e93-1e222a8da1ed-logs" (OuterVolumeSpecName: "logs") pod "ea39ebd5-5040-448e-9e93-1e222a8da1ed" (UID: 
"ea39ebd5-5040-448e-9e93-1e222a8da1ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.065230 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea39ebd5-5040-448e-9e93-1e222a8da1ed-kube-api-access-vqqw4" (OuterVolumeSpecName: "kube-api-access-vqqw4") pod "ea39ebd5-5040-448e-9e93-1e222a8da1ed" (UID: "ea39ebd5-5040-448e-9e93-1e222a8da1ed"). InnerVolumeSpecName "kube-api-access-vqqw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.065536 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea39ebd5-5040-448e-9e93-1e222a8da1ed-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "ea39ebd5-5040-448e-9e93-1e222a8da1ed" (UID: "ea39ebd5-5040-448e-9e93-1e222a8da1ed"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.084860 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-scripts" (OuterVolumeSpecName: "scripts") pod "ea39ebd5-5040-448e-9e93-1e222a8da1ed" (UID: "ea39ebd5-5040-448e-9e93-1e222a8da1ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.094581 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-config-data" (OuterVolumeSpecName: "config-data") pod "ea39ebd5-5040-448e-9e93-1e222a8da1ed" (UID: "ea39ebd5-5040-448e-9e93-1e222a8da1ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.162275 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ea39ebd5-5040-448e-9e93-1e222a8da1ed-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.162308 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea39ebd5-5040-448e-9e93-1e222a8da1ed-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.162321 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqqw4\" (UniqueName: \"kubernetes.io/projected/ea39ebd5-5040-448e-9e93-1e222a8da1ed-kube-api-access-vqqw4\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.162335 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.162347 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea39ebd5-5040-448e-9e93-1e222a8da1ed-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.491690 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.568904 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/887528d1-faac-421f-915c-3e2ff6f57064-horizon-secret-key\") pod \"887528d1-faac-421f-915c-3e2ff6f57064\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.568986 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9btxg\" (UniqueName: \"kubernetes.io/projected/887528d1-faac-421f-915c-3e2ff6f57064-kube-api-access-9btxg\") pod \"887528d1-faac-421f-915c-3e2ff6f57064\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.569035 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-config-data\") pod \"887528d1-faac-421f-915c-3e2ff6f57064\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.569061 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-scripts\") pod \"887528d1-faac-421f-915c-3e2ff6f57064\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.569114 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/887528d1-faac-421f-915c-3e2ff6f57064-logs\") pod \"887528d1-faac-421f-915c-3e2ff6f57064\" (UID: \"887528d1-faac-421f-915c-3e2ff6f57064\") " Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.569502 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887528d1-faac-421f-915c-3e2ff6f57064-logs" (OuterVolumeSpecName: "logs") pod "887528d1-faac-421f-915c-3e2ff6f57064" (UID: "887528d1-faac-421f-915c-3e2ff6f57064"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.573578 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887528d1-faac-421f-915c-3e2ff6f57064-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "887528d1-faac-421f-915c-3e2ff6f57064" (UID: "887528d1-faac-421f-915c-3e2ff6f57064"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.574002 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/887528d1-faac-421f-915c-3e2ff6f57064-kube-api-access-9btxg" (OuterVolumeSpecName: "kube-api-access-9btxg") pod "887528d1-faac-421f-915c-3e2ff6f57064" (UID: "887528d1-faac-421f-915c-3e2ff6f57064"). InnerVolumeSpecName "kube-api-access-9btxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.606110 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-config-data" (OuterVolumeSpecName: "config-data") pod "887528d1-faac-421f-915c-3e2ff6f57064" (UID: "887528d1-faac-421f-915c-3e2ff6f57064"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.607531 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-scripts" (OuterVolumeSpecName: "scripts") pod "887528d1-faac-421f-915c-3e2ff6f57064" (UID: "887528d1-faac-421f-915c-3e2ff6f57064"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.671125 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/887528d1-faac-421f-915c-3e2ff6f57064-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.671173 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9btxg\" (UniqueName: \"kubernetes.io/projected/887528d1-faac-421f-915c-3e2ff6f57064-kube-api-access-9btxg\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.671187 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.671202 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/887528d1-faac-421f-915c-3e2ff6f57064-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.671213 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/887528d1-faac-421f-915c-3e2ff6f57064-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.840895 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.953928 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-678b67796f-5sffs" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.954864 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c89677697-dsn4d" Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.955482 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c89677697-dsn4d" event={"ID":"887528d1-faac-421f-915c-3e2ff6f57064","Type":"ContainerDied","Data":"e7212811eed2c9849fd07c7b6779d7be4de42b51cf696e23e6c9b2fb1ee83051"} Dec 03 18:16:30 crc kubenswrapper[5002]: I1203 18:16:30.955536 5002 scope.go:117] "RemoveContainer" containerID="db1d6950ec3b647b971fb86c7a6e2ae8c08ac4002cd9f475ab81b24998d5e54c" Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.006799 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-678b67796f-5sffs"] Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.015464 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-678b67796f-5sffs"] Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.026858 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c89677697-dsn4d"] Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.034809 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6c89677697-dsn4d"] Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.204942 5002 scope.go:117] "RemoveContainer" containerID="5e907135a21fcd8afbe6329339c37579f5aa2a9fb19f38dd555dc5023a652d64" Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.717402 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.113:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8443: connect: connection refused" Dec 03 18:16:31 crc kubenswrapper[5002]: I1203 18:16:31.968316 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b"} Dec 03 18:16:32 crc kubenswrapper[5002]: I1203 18:16:32.890938 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="887528d1-faac-421f-915c-3e2ff6f57064" path="/var/lib/kubelet/pods/887528d1-faac-421f-915c-3e2ff6f57064/volumes" Dec 03 18:16:32 crc kubenswrapper[5002]: I1203 18:16:32.891910 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" path="/var/lib/kubelet/pods/ea39ebd5-5040-448e-9e93-1e222a8da1ed/volumes" Dec 03 18:16:41 crc kubenswrapper[5002]: I1203 18:16:41.716832 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.113:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8443: connect: connection refused" Dec 03 18:16:44 crc kubenswrapper[5002]: I1203 18:16:44.031694 5002 scope.go:117] "RemoveContainer" containerID="d97f51523fe99c96754f5e9c8d39bca99d8fa2947865bb07d38a5b49beed166c" Dec 03 18:16:44 crc kubenswrapper[5002]: I1203 18:16:44.065773 5002 scope.go:117] "RemoveContainer" containerID="67c245d3b6da80f64e2dc1080b95102c1606568b44fe7704cc95cc88eb268df0" Dec 03 18:16:44 crc kubenswrapper[5002]: I1203 18:16:44.137788 5002 scope.go:117] "RemoveContainer" containerID="d088a4958ae42d3b7f345a7c1f4652bf3e39a7f295405fb6d700d60379663dba" Dec 03 18:16:44 crc 
Dec 03 18:16:44 crc kubenswrapper[5002]: I1203 18:16:44.189338 5002 scope.go:117] "RemoveContainer" containerID="66de54d76af31b2913e68e652216415e395a8c22c89e7bb2629c3399cb430057"
Dec 03 18:16:44 crc kubenswrapper[5002]: I1203 18:16:44.240644 5002 scope.go:117] "RemoveContainer" containerID="628c3d83bc8b8a64686a81c906b1113136cfbeea92a51f9e878ae4065f87d7f3"
Dec 03 18:16:44 crc kubenswrapper[5002]: I1203 18:16:44.296826 5002 scope.go:117] "RemoveContainer" containerID="229c6cf1fce60475a3ae4f0cd39f36ee195ec22cbee67ff139b8578f04b9fe55"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.388154 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rd8n9"]
Dec 03 18:16:50 crc kubenswrapper[5002]: E1203 18:16:50.389431 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389448 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon"
Dec 03 18:16:50 crc kubenswrapper[5002]: E1203 18:16:50.389470 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon-log"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389477 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon-log"
Dec 03 18:16:50 crc kubenswrapper[5002]: E1203 18:16:50.389510 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389518 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon"
Dec 03 18:16:50 crc kubenswrapper[5002]: E1203 18:16:50.389536 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon-log"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389543 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon-log"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389774 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon-log"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389789 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389805 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea39ebd5-5040-448e-9e93-1e222a8da1ed" containerName="horizon-log"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.389824 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="887528d1-faac-421f-915c-3e2ff6f57064" containerName="horizon"
Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.391630 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rd8n9"
Need to start a new one" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.422352 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rd8n9"] Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.502795 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-catalog-content\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.502960 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-utilities\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.503206 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmdjn\" (UniqueName: \"kubernetes.io/projected/651f5e9b-a1df-401c-9051-bc661918eb75-kube-api-access-hmdjn\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.606565 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-utilities\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.606774 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmdjn\" (UniqueName: \"kubernetes.io/projected/651f5e9b-a1df-401c-9051-bc661918eb75-kube-api-access-hmdjn\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.606873 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-catalog-content\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.607482 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-utilities\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.607592 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-catalog-content\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.641775 5002 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hmdjn\" (UniqueName: \"kubernetes.io/projected/651f5e9b-a1df-401c-9051-bc661918eb75-kube-api-access-hmdjn\") pod \"community-operators-rd8n9\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:50 crc kubenswrapper[5002]: I1203 18:16:50.729888 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:16:51 crc kubenswrapper[5002]: I1203 18:16:51.247083 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rd8n9"] Dec 03 18:16:51 crc kubenswrapper[5002]: I1203 18:16:51.716396 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5f795f5b86-4bds9" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.113:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8443: connect: connection refused" Dec 03 18:16:52 crc kubenswrapper[5002]: I1203 18:16:52.190356 5002 generic.go:334] "Generic (PLEG): container finished" podID="651f5e9b-a1df-401c-9051-bc661918eb75" containerID="86d752b15508879d905147fa3af8eb85aa9fffd4a7b1afe7cce891aaa317352d" exitCode=0 Dec 03 18:16:52 crc kubenswrapper[5002]: I1203 18:16:52.190421 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerDied","Data":"86d752b15508879d905147fa3af8eb85aa9fffd4a7b1afe7cce891aaa317352d"} Dec 03 18:16:52 crc kubenswrapper[5002]: I1203 18:16:52.190479 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerStarted","Data":"ce05274b6cb64e4abe10fb162d2ae7500a385570c940a52e21575d53755c6f4e"} Dec 03 18:16:52 crc kubenswrapper[5002]: I1203 18:16:52.192570 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 18:16:54 crc kubenswrapper[5002]: I1203 18:16:54.215644 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerStarted","Data":"90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd"} Dec 03 18:16:55 crc kubenswrapper[5002]: I1203 18:16:55.256819 5002 generic.go:334] "Generic (PLEG): container finished" podID="651f5e9b-a1df-401c-9051-bc661918eb75" containerID="90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd" exitCode=0 Dec 03 18:16:55 crc kubenswrapper[5002]: I1203 18:16:55.256961 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerDied","Data":"90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd"} Dec 03 18:16:55 crc kubenswrapper[5002]: W1203 18:16:55.509258 5002 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-conmon-86d752b15508879d905147fa3af8eb85aa9fffd4a7b1afe7cce891aaa317352d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch 
Dec 03 18:16:55 crc kubenswrapper[5002]: W1203 18:16:55.509315 5002 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-86d752b15508879d905147fa3af8eb85aa9fffd4a7b1afe7cce891aaa317352d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-86d752b15508879d905147fa3af8eb85aa9fffd4a7b1afe7cce891aaa317352d.scope: no such file or directory
Dec 03 18:16:55 crc kubenswrapper[5002]: W1203 18:16:55.509333 5002 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-conmon-90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-conmon-90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd.scope: no such file or directory
Dec 03 18:16:55 crc kubenswrapper[5002]: W1203 18:16:55.509351 5002 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod651f5e9b_a1df_401c_9051_bc661918eb75.slice/crio-90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd.scope: no such file or directory
Dec 03 18:16:55 crc kubenswrapper[5002]: E1203 18:16:55.765469 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea39ebd5_5040_448e_9e93_1e222a8da1ed.slice/crio-7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad\": RecentStats: unable to find data in memory cache]"
Dec 03 18:16:55 crc kubenswrapper[5002]: I1203 18:16:55.972485 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f795f5b86-4bds9"
Need to start a new one" pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013084 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7fmd\" (UniqueName: \"kubernetes.io/projected/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-kube-api-access-c7fmd\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013465 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-secret-key\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013510 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-logs\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013568 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-scripts\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013684 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-config-data\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013711 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-tls-certs\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.013847 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-combined-ca-bundle\") pod \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\" (UID: \"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6\") " Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.014034 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-logs" (OuterVolumeSpecName: "logs") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.014402 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.021147 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-kube-api-access-c7fmd" (OuterVolumeSpecName: "kube-api-access-c7fmd") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). 
InnerVolumeSpecName "kube-api-access-c7fmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.027876 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.046825 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-scripts" (OuterVolumeSpecName: "scripts") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.046955 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-config-data" (OuterVolumeSpecName: "config-data") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.052100 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.074955 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" (UID: "d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.121383 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.121446 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7fmd\" (UniqueName: \"kubernetes.io/projected/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-kube-api-access-c7fmd\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.121463 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.121488 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.121503 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.121517 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.267199 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerStarted","Data":"52cbce8f47bb0bfc28378a99fdd89d415d1b52ffcded64913d611cb314b66895"} Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.269456 5002 generic.go:334] "Generic (PLEG): container finished" podID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerID="46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43" exitCode=137 Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.269496 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f795f5b86-4bds9" event={"ID":"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6","Type":"ContainerDied","Data":"46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43"} Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.269504 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5f795f5b86-4bds9" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.269519 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f795f5b86-4bds9" event={"ID":"d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6","Type":"ContainerDied","Data":"8c8a853e9ca31a7b85d036f1178bf4d5f35939f1cb9b38e747eb244509cc80fa"} Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.269540 5002 scope.go:117] "RemoveContainer" containerID="0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.285099 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rd8n9" podStartSLOduration=2.811997933 podStartE2EDuration="6.285078435s" podCreationTimestamp="2025-12-03 18:16:50 +0000 UTC" firstStartedPulling="2025-12-03 18:16:52.192304458 +0000 UTC m=+6335.606126346" lastFinishedPulling="2025-12-03 18:16:55.66538496 +0000 UTC m=+6339.079206848" observedRunningTime="2025-12-03 18:16:56.284494989 +0000 UTC m=+6339.698316897" watchObservedRunningTime="2025-12-03 18:16:56.285078435 +0000 UTC m=+6339.698900333" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.313880 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5f795f5b86-4bds9"] Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.321597 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5f795f5b86-4bds9"] Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.455630 5002 scope.go:117] "RemoveContainer" containerID="46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.474375 5002 scope.go:117] "RemoveContainer" containerID="0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc" Dec 03 18:16:56 crc kubenswrapper[5002]: E1203 18:16:56.474841 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc\": container with ID starting with 0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc not found: ID does not exist" containerID="0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.474874 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc"} err="failed to get container status \"0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc\": rpc error: code = NotFound desc = could not find container \"0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc\": container with ID starting with 0704afb7e256be73300f59c86d95c82d83c220b757b9f9667c0d5138b46180fc not found: ID does not exist" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.474898 5002 scope.go:117] "RemoveContainer" containerID="46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43" Dec 03 18:16:56 crc kubenswrapper[5002]: E1203 18:16:56.475198 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43\": container with ID starting with 46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43 not found: ID does not exist" 
containerID="46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.475226 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43"} err="failed to get container status \"46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43\": rpc error: code = NotFound desc = could not find container \"46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43\": container with ID starting with 46bb7f872cdccd3b0c17907f072bcdffa81a87e1d4d6116784b79f84e8575b43 not found: ID does not exist" Dec 03 18:16:56 crc kubenswrapper[5002]: I1203 18:16:56.852653 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" path="/var/lib/kubelet/pods/d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6/volumes" Dec 03 18:17:00 crc kubenswrapper[5002]: I1203 18:17:00.730833 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:17:00 crc kubenswrapper[5002]: I1203 18:17:00.731396 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:17:00 crc kubenswrapper[5002]: I1203 18:17:00.782670 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:17:01 crc kubenswrapper[5002]: I1203 18:17:01.387135 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:17:01 crc kubenswrapper[5002]: I1203 18:17:01.452399 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rd8n9"] Dec 03 18:17:03 crc kubenswrapper[5002]: I1203 18:17:03.349173 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rd8n9" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="registry-server" containerID="cri-o://52cbce8f47bb0bfc28378a99fdd89d415d1b52ffcded64913d611cb314b66895" gracePeriod=2 Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.360016 5002 generic.go:334] "Generic (PLEG): container finished" podID="651f5e9b-a1df-401c-9051-bc661918eb75" containerID="52cbce8f47bb0bfc28378a99fdd89d415d1b52ffcded64913d611cb314b66895" exitCode=0 Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.360114 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerDied","Data":"52cbce8f47bb0bfc28378a99fdd89d415d1b52ffcded64913d611cb314b66895"} Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.360417 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rd8n9" event={"ID":"651f5e9b-a1df-401c-9051-bc661918eb75","Type":"ContainerDied","Data":"ce05274b6cb64e4abe10fb162d2ae7500a385570c940a52e21575d53755c6f4e"} Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.360475 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce05274b6cb64e4abe10fb162d2ae7500a385570c940a52e21575d53755c6f4e" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.426009 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.614399 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmdjn\" (UniqueName: \"kubernetes.io/projected/651f5e9b-a1df-401c-9051-bc661918eb75-kube-api-access-hmdjn\") pod \"651f5e9b-a1df-401c-9051-bc661918eb75\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.614515 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-utilities\") pod \"651f5e9b-a1df-401c-9051-bc661918eb75\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.614605 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-catalog-content\") pod \"651f5e9b-a1df-401c-9051-bc661918eb75\" (UID: \"651f5e9b-a1df-401c-9051-bc661918eb75\") " Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.615145 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-utilities" (OuterVolumeSpecName: "utilities") pod "651f5e9b-a1df-401c-9051-bc661918eb75" (UID: "651f5e9b-a1df-401c-9051-bc661918eb75"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.615317 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.619110 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/651f5e9b-a1df-401c-9051-bc661918eb75-kube-api-access-hmdjn" (OuterVolumeSpecName: "kube-api-access-hmdjn") pod "651f5e9b-a1df-401c-9051-bc661918eb75" (UID: "651f5e9b-a1df-401c-9051-bc661918eb75"). InnerVolumeSpecName "kube-api-access-hmdjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.676066 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "651f5e9b-a1df-401c-9051-bc661918eb75" (UID: "651f5e9b-a1df-401c-9051-bc661918eb75"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.716019 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmdjn\" (UniqueName: \"kubernetes.io/projected/651f5e9b-a1df-401c-9051-bc661918eb75-kube-api-access-hmdjn\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:04 crc kubenswrapper[5002]: I1203 18:17:04.716393 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/651f5e9b-a1df-401c-9051-bc661918eb75-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:05 crc kubenswrapper[5002]: I1203 18:17:05.371217 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rd8n9" Dec 03 18:17:05 crc kubenswrapper[5002]: I1203 18:17:05.400431 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rd8n9"] Dec 03 18:17:05 crc kubenswrapper[5002]: I1203 18:17:05.409348 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rd8n9"] Dec 03 18:17:06 crc kubenswrapper[5002]: E1203 18:17:06.021212 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea39ebd5_5040_448e_9e93_1e222a8da1ed.slice/crio-7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad\": RecentStats: unable to find data in memory cache]" Dec 03 18:17:06 crc kubenswrapper[5002]: I1203 18:17:06.852844 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" path="/var/lib/kubelet/pods/651f5e9b-a1df-401c-9051-bc661918eb75/volumes" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.573248 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-57d6578878-lnnzf"] Dec 03 18:17:07 crc kubenswrapper[5002]: E1203 18:17:07.573722 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon-log" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.573742 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon-log" Dec 03 18:17:07 crc kubenswrapper[5002]: E1203 18:17:07.573779 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.573787 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" Dec 03 18:17:07 crc kubenswrapper[5002]: E1203 18:17:07.573809 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="extract-content" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.573819 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="extract-content" Dec 03 18:17:07 crc kubenswrapper[5002]: E1203 18:17:07.573837 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="extract-utilities" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.573844 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="extract-utilities" Dec 03 18:17:07 crc kubenswrapper[5002]: E1203 18:17:07.573855 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="registry-server" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.573863 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="registry-server" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.574109 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="651f5e9b-a1df-401c-9051-bc661918eb75" containerName="registry-server" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.574134 5002 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon-log" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.574153 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a741bc-14b4-4d21-b6b0-5cc5a8998eb6" containerName="horizon" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.575491 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.600630 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57d6578878-lnnzf"] Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673651 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlr86\" (UniqueName: \"kubernetes.io/projected/15a59bf6-beb9-43f8-b192-5f3dfe627c28-kube-api-access-xlr86\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673717 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-combined-ca-bundle\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673756 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a59bf6-beb9-43f8-b192-5f3dfe627c28-logs\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673771 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-horizon-tls-certs\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673820 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15a59bf6-beb9-43f8-b192-5f3dfe627c28-config-data\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673878 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-horizon-secret-key\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.673909 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15a59bf6-beb9-43f8-b192-5f3dfe627c28-scripts\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775617 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/15a59bf6-beb9-43f8-b192-5f3dfe627c28-config-data\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775723 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-horizon-secret-key\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775773 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15a59bf6-beb9-43f8-b192-5f3dfe627c28-scripts\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775840 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlr86\" (UniqueName: \"kubernetes.io/projected/15a59bf6-beb9-43f8-b192-5f3dfe627c28-kube-api-access-xlr86\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775872 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-combined-ca-bundle\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775897 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a59bf6-beb9-43f8-b192-5f3dfe627c28-logs\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.775912 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-horizon-tls-certs\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.776651 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a59bf6-beb9-43f8-b192-5f3dfe627c28-logs\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.777688 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15a59bf6-beb9-43f8-b192-5f3dfe627c28-scripts\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.778153 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15a59bf6-beb9-43f8-b192-5f3dfe627c28-config-data\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " 
pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.781644 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-horizon-secret-key\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.782544 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-combined-ca-bundle\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.784394 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a59bf6-beb9-43f8-b192-5f3dfe627c28-horizon-tls-certs\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.793862 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlr86\" (UniqueName: \"kubernetes.io/projected/15a59bf6-beb9-43f8-b192-5f3dfe627c28-kube-api-access-xlr86\") pod \"horizon-57d6578878-lnnzf\" (UID: \"15a59bf6-beb9-43f8-b192-5f3dfe627c28\") " pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:07 crc kubenswrapper[5002]: I1203 18:17:07.896817 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:08 crc kubenswrapper[5002]: I1203 18:17:08.346984 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57d6578878-lnnzf"] Dec 03 18:17:08 crc kubenswrapper[5002]: I1203 18:17:08.416791 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57d6578878-lnnzf" event={"ID":"15a59bf6-beb9-43f8-b192-5f3dfe627c28","Type":"ContainerStarted","Data":"25ff3c49e6becccdeaf6c1a1e41e2cfa455fb2a00c4753463dfc980db25e5b72"} Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.075895 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-szwjx"] Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.077793 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.086667 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-szwjx"] Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.162959 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-70e9-account-create-update-sp89s"] Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.164230 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.165929 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.180643 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-70e9-account-create-update-sp89s"] Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.210162 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4973cbe-4313-4bdd-af74-81dade285f65-operator-scripts\") pod \"heat-db-create-szwjx\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") " pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.210566 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks76t\" (UniqueName: \"kubernetes.io/projected/e4973cbe-4313-4bdd-af74-81dade285f65-kube-api-access-ks76t\") pod \"heat-db-create-szwjx\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") " pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.312676 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks76t\" (UniqueName: \"kubernetes.io/projected/e4973cbe-4313-4bdd-af74-81dade285f65-kube-api-access-ks76t\") pod \"heat-db-create-szwjx\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") " pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.313077 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a49549ac-d109-4567-b79c-a2df131387aa-operator-scripts\") pod \"heat-70e9-account-create-update-sp89s\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") " pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.313248 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4973cbe-4313-4bdd-af74-81dade285f65-operator-scripts\") pod \"heat-db-create-szwjx\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") " pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.313363 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftsw6\" (UniqueName: \"kubernetes.io/projected/a49549ac-d109-4567-b79c-a2df131387aa-kube-api-access-ftsw6\") pod \"heat-70e9-account-create-update-sp89s\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") " pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.314141 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4973cbe-4313-4bdd-af74-81dade285f65-operator-scripts\") pod \"heat-db-create-szwjx\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") " pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.332429 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks76t\" (UniqueName: \"kubernetes.io/projected/e4973cbe-4313-4bdd-af74-81dade285f65-kube-api-access-ks76t\") pod \"heat-db-create-szwjx\" (UID: 
\"e4973cbe-4313-4bdd-af74-81dade285f65\") " pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.413299 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-szwjx" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.414644 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a49549ac-d109-4567-b79c-a2df131387aa-operator-scripts\") pod \"heat-70e9-account-create-update-sp89s\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") " pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.414822 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftsw6\" (UniqueName: \"kubernetes.io/projected/a49549ac-d109-4567-b79c-a2df131387aa-kube-api-access-ftsw6\") pod \"heat-70e9-account-create-update-sp89s\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") " pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.415575 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a49549ac-d109-4567-b79c-a2df131387aa-operator-scripts\") pod \"heat-70e9-account-create-update-sp89s\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") " pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.433043 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57d6578878-lnnzf" event={"ID":"15a59bf6-beb9-43f8-b192-5f3dfe627c28","Type":"ContainerStarted","Data":"7d38b4e8247e8a6986e8fefa3a81feaf50b32d602a9cc4ebf0a6d020f36ff8eb"} Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.433089 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57d6578878-lnnzf" event={"ID":"15a59bf6-beb9-43f8-b192-5f3dfe627c28","Type":"ContainerStarted","Data":"92f0f83485e77871b64bda7db334c26f070c97d43335fe431482657dfa60586c"} Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.450729 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftsw6\" (UniqueName: \"kubernetes.io/projected/a49549ac-d109-4567-b79c-a2df131387aa-kube-api-access-ftsw6\") pod \"heat-70e9-account-create-update-sp89s\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") " pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.462574 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-57d6578878-lnnzf" podStartSLOduration=2.462547947 podStartE2EDuration="2.462547947s" podCreationTimestamp="2025-12-03 18:17:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:09.455262141 +0000 UTC m=+6352.869084029" watchObservedRunningTime="2025-12-03 18:17:09.462547947 +0000 UTC m=+6352.876369845" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.481908 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:09 crc kubenswrapper[5002]: I1203 18:17:09.769258 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-szwjx"] Dec 03 18:17:09 crc kubenswrapper[5002]: W1203 18:17:09.770988 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4973cbe_4313_4bdd_af74_81dade285f65.slice/crio-b56a70daa0c4083dddfe51332b119888912a54342f0986675bfd0d9f47ea3bdf WatchSource:0}: Error finding container b56a70daa0c4083dddfe51332b119888912a54342f0986675bfd0d9f47ea3bdf: Status 404 returned error can't find the container with id b56a70daa0c4083dddfe51332b119888912a54342f0986675bfd0d9f47ea3bdf Dec 03 18:17:10 crc kubenswrapper[5002]: W1203 18:17:10.059296 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda49549ac_d109_4567_b79c_a2df131387aa.slice/crio-fef3bfe270e4f29b75565b7563f97f7e18bfe12552b6de0f346ab1d5f8fc5701 WatchSource:0}: Error finding container fef3bfe270e4f29b75565b7563f97f7e18bfe12552b6de0f346ab1d5f8fc5701: Status 404 returned error can't find the container with id fef3bfe270e4f29b75565b7563f97f7e18bfe12552b6de0f346ab1d5f8fc5701 Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.062349 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-70e9-account-create-update-sp89s"] Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.442827 5002 generic.go:334] "Generic (PLEG): container finished" podID="e4973cbe-4313-4bdd-af74-81dade285f65" containerID="60796b4ea23d163d7fae93c3070ed6269d66a1216cb3ed1686c19c10467c35fe" exitCode=0 Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.442921 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-szwjx" event={"ID":"e4973cbe-4313-4bdd-af74-81dade285f65","Type":"ContainerDied","Data":"60796b4ea23d163d7fae93c3070ed6269d66a1216cb3ed1686c19c10467c35fe"} Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.442960 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-szwjx" event={"ID":"e4973cbe-4313-4bdd-af74-81dade285f65","Type":"ContainerStarted","Data":"b56a70daa0c4083dddfe51332b119888912a54342f0986675bfd0d9f47ea3bdf"} Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.445910 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-70e9-account-create-update-sp89s" event={"ID":"a49549ac-d109-4567-b79c-a2df131387aa","Type":"ContainerStarted","Data":"dcc71c7b1dcd7d2737a9dc04e7b4f091a08253f1d4cd064c05330cbfe0ab3d5d"} Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.445942 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-70e9-account-create-update-sp89s" event={"ID":"a49549ac-d109-4567-b79c-a2df131387aa","Type":"ContainerStarted","Data":"fef3bfe270e4f29b75565b7563f97f7e18bfe12552b6de0f346ab1d5f8fc5701"} Dec 03 18:17:10 crc kubenswrapper[5002]: I1203 18:17:10.479339 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-70e9-account-create-update-sp89s" podStartSLOduration=1.479313489 podStartE2EDuration="1.479313489s" podCreationTimestamp="2025-12-03 18:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:10.469042201 +0000 UTC m=+6353.882864099" watchObservedRunningTime="2025-12-03 18:17:10.479313489 
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.459195 5002 generic.go:334] "Generic (PLEG): container finished" podID="a49549ac-d109-4567-b79c-a2df131387aa" containerID="dcc71c7b1dcd7d2737a9dc04e7b4f091a08253f1d4cd064c05330cbfe0ab3d5d" exitCode=0
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.459239 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-70e9-account-create-update-sp89s" event={"ID":"a49549ac-d109-4567-b79c-a2df131387aa","Type":"ContainerDied","Data":"dcc71c7b1dcd7d2737a9dc04e7b4f091a08253f1d4cd064c05330cbfe0ab3d5d"}
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.946130 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-szwjx"
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.978542 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4973cbe-4313-4bdd-af74-81dade285f65-operator-scripts\") pod \"e4973cbe-4313-4bdd-af74-81dade285f65\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") "
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.978699 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ks76t\" (UniqueName: \"kubernetes.io/projected/e4973cbe-4313-4bdd-af74-81dade285f65-kube-api-access-ks76t\") pod \"e4973cbe-4313-4bdd-af74-81dade285f65\" (UID: \"e4973cbe-4313-4bdd-af74-81dade285f65\") "
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.979106 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4973cbe-4313-4bdd-af74-81dade285f65-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e4973cbe-4313-4bdd-af74-81dade285f65" (UID: "e4973cbe-4313-4bdd-af74-81dade285f65"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.979439 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4973cbe-4313-4bdd-af74-81dade285f65-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:17:11 crc kubenswrapper[5002]: I1203 18:17:11.984973 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4973cbe-4313-4bdd-af74-81dade285f65-kube-api-access-ks76t" (OuterVolumeSpecName: "kube-api-access-ks76t") pod "e4973cbe-4313-4bdd-af74-81dade285f65" (UID: "e4973cbe-4313-4bdd-af74-81dade285f65"). InnerVolumeSpecName "kube-api-access-ks76t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.080882 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ks76t\" (UniqueName: \"kubernetes.io/projected/e4973cbe-4313-4bdd-af74-81dade285f65-kube-api-access-ks76t\") on node \"crc\" DevicePath \"\""
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.469675 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-szwjx"
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.469716 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-szwjx" event={"ID":"e4973cbe-4313-4bdd-af74-81dade285f65","Type":"ContainerDied","Data":"b56a70daa0c4083dddfe51332b119888912a54342f0986675bfd0d9f47ea3bdf"}
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.469739 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b56a70daa0c4083dddfe51332b119888912a54342f0986675bfd0d9f47ea3bdf"
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.884590 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-70e9-account-create-update-sp89s"
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.995219 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftsw6\" (UniqueName: \"kubernetes.io/projected/a49549ac-d109-4567-b79c-a2df131387aa-kube-api-access-ftsw6\") pod \"a49549ac-d109-4567-b79c-a2df131387aa\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") "
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.995431 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a49549ac-d109-4567-b79c-a2df131387aa-operator-scripts\") pod \"a49549ac-d109-4567-b79c-a2df131387aa\" (UID: \"a49549ac-d109-4567-b79c-a2df131387aa\") "
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.995980 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a49549ac-d109-4567-b79c-a2df131387aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a49549ac-d109-4567-b79c-a2df131387aa" (UID: "a49549ac-d109-4567-b79c-a2df131387aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:17:12 crc kubenswrapper[5002]: I1203 18:17:12.996406 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a49549ac-d109-4567-b79c-a2df131387aa-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:17:13 crc kubenswrapper[5002]: I1203 18:17:13.007924 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a49549ac-d109-4567-b79c-a2df131387aa-kube-api-access-ftsw6" (OuterVolumeSpecName: "kube-api-access-ftsw6") pod "a49549ac-d109-4567-b79c-a2df131387aa" (UID: "a49549ac-d109-4567-b79c-a2df131387aa"). InnerVolumeSpecName "kube-api-access-ftsw6". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:17:13 crc kubenswrapper[5002]: I1203 18:17:13.098859 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftsw6\" (UniqueName: \"kubernetes.io/projected/a49549ac-d109-4567-b79c-a2df131387aa-kube-api-access-ftsw6\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:13 crc kubenswrapper[5002]: I1203 18:17:13.480780 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-70e9-account-create-update-sp89s" event={"ID":"a49549ac-d109-4567-b79c-a2df131387aa","Type":"ContainerDied","Data":"fef3bfe270e4f29b75565b7563f97f7e18bfe12552b6de0f346ab1d5f8fc5701"} Dec 03 18:17:13 crc kubenswrapper[5002]: I1203 18:17:13.481208 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fef3bfe270e4f29b75565b7563f97f7e18bfe12552b6de0f346ab1d5f8fc5701" Dec 03 18:17:13 crc kubenswrapper[5002]: I1203 18:17:13.480827 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-70e9-account-create-update-sp89s" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.216069 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-tdkwj"] Dec 03 18:17:14 crc kubenswrapper[5002]: E1203 18:17:14.216799 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4973cbe-4313-4bdd-af74-81dade285f65" containerName="mariadb-database-create" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.216829 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4973cbe-4313-4bdd-af74-81dade285f65" containerName="mariadb-database-create" Dec 03 18:17:14 crc kubenswrapper[5002]: E1203 18:17:14.216895 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a49549ac-d109-4567-b79c-a2df131387aa" containerName="mariadb-account-create-update" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.216913 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="a49549ac-d109-4567-b79c-a2df131387aa" containerName="mariadb-account-create-update" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.217275 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="a49549ac-d109-4567-b79c-a2df131387aa" containerName="mariadb-account-create-update" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.217318 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4973cbe-4313-4bdd-af74-81dade285f65" containerName="mariadb-database-create" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.218629 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.220557 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.222677 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-gwzb4" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.226925 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-tdkwj"] Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.324371 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-combined-ca-bundle\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.324553 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd7k5\" (UniqueName: \"kubernetes.io/projected/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-kube-api-access-rd7k5\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.324702 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-config-data\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.426901 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-config-data\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.426958 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-combined-ca-bundle\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.427041 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd7k5\" (UniqueName: \"kubernetes.io/projected/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-kube-api-access-rd7k5\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.433661 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-config-data\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.445538 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-combined-ca-bundle\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj" 
Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.448076 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd7k5\" (UniqueName: \"kubernetes.io/projected/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-kube-api-access-rd7k5\") pod \"heat-db-sync-tdkwj\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") " pod="openstack/heat-db-sync-tdkwj"
Dec 03 18:17:14 crc kubenswrapper[5002]: I1203 18:17:14.542624 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-tdkwj"
Dec 03 18:17:15 crc kubenswrapper[5002]: I1203 18:17:15.004950 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-tdkwj"]
Dec 03 18:17:15 crc kubenswrapper[5002]: I1203 18:17:15.498952 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-tdkwj" event={"ID":"94a46076-2c6c-48fe-8ec3-6b239ab5aa55","Type":"ContainerStarted","Data":"634812aff5acaddc79fead1c3de214f0c54a57b53b0bc937b241d662c7072db0"}
Dec 03 18:17:16 crc kubenswrapper[5002]: E1203 18:17:16.261652 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea39ebd5_5040_448e_9e93_1e222a8da1ed.slice/crio-7f060ef01b55427f6195d4b403ee65ced0d681d9c7d86f37a939b22ba7792cad\": RecentStats: unable to find data in memory cache]"
Dec 03 18:17:17 crc kubenswrapper[5002]: I1203 18:17:17.896938 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-57d6578878-lnnzf"
Dec 03 18:17:17 crc kubenswrapper[5002]: I1203 18:17:17.897303 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-57d6578878-lnnzf"
Dec 03 18:17:20 crc kubenswrapper[5002]: I1203 18:17:20.040402 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-ee79-account-create-update-gpcb8"]
Dec 03 18:17:20 crc kubenswrapper[5002]: I1203 18:17:20.057665 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-wrccs"]
Dec 03 18:17:20 crc kubenswrapper[5002]: I1203 18:17:20.068200 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-ee79-account-create-update-gpcb8"]
Dec 03 18:17:20 crc kubenswrapper[5002]: I1203 18:17:20.077089 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-wrccs"]
Dec 03 18:17:20 crc kubenswrapper[5002]: I1203 18:17:20.856730 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36ce123f-22ef-459f-aa84-40ac04d8a5ac" path="/var/lib/kubelet/pods/36ce123f-22ef-459f-aa84-40ac04d8a5ac/volumes"
Dec 03 18:17:20 crc kubenswrapper[5002]: I1203 18:17:20.907577 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f" path="/var/lib/kubelet/pods/eaad4f00-65b7-4ee8-bc09-d4dd569a5f1f/volumes"
Dec 03 18:17:23 crc kubenswrapper[5002]: I1203 18:17:23.596073 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-tdkwj" event={"ID":"94a46076-2c6c-48fe-8ec3-6b239ab5aa55","Type":"ContainerStarted","Data":"f3862aa55aeddc92a972ce94167f7ec087c01f2148302b6b220f5fea0c158742"}
Dec 03 18:17:23 crc kubenswrapper[5002]: I1203 18:17:23.628591 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-tdkwj" podStartSLOduration=2.284779904 podStartE2EDuration="9.628566391s" podCreationTimestamp="2025-12-03 18:17:14 +0000 UTC" firstStartedPulling="2025-12-03 18:17:15.01586262 +0000 UTC m=+6358.429684508" lastFinishedPulling="2025-12-03 18:17:22.359649107 +0000 UTC m=+6365.773470995" observedRunningTime="2025-12-03 18:17:23.620117563 +0000 UTC m=+6367.033939461" watchObservedRunningTime="2025-12-03 18:17:23.628566391 +0000 UTC m=+6367.042388289"
Dec 03 18:17:24 crc kubenswrapper[5002]: I1203 18:17:24.633910 5002 generic.go:334] "Generic (PLEG): container finished" podID="94a46076-2c6c-48fe-8ec3-6b239ab5aa55" containerID="f3862aa55aeddc92a972ce94167f7ec087c01f2148302b6b220f5fea0c158742" exitCode=0
Dec 03 18:17:24 crc kubenswrapper[5002]: I1203 18:17:24.634434 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-tdkwj" event={"ID":"94a46076-2c6c-48fe-8ec3-6b239ab5aa55","Type":"ContainerDied","Data":"f3862aa55aeddc92a972ce94167f7ec087c01f2148302b6b220f5fea0c158742"}
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.081106 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-tdkwj"
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.194041 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd7k5\" (UniqueName: \"kubernetes.io/projected/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-kube-api-access-rd7k5\") pod \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") "
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.194412 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-combined-ca-bundle\") pod \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") "
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.194456 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-config-data\") pod \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\" (UID: \"94a46076-2c6c-48fe-8ec3-6b239ab5aa55\") "
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.200069 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-kube-api-access-rd7k5" (OuterVolumeSpecName: "kube-api-access-rd7k5") pod "94a46076-2c6c-48fe-8ec3-6b239ab5aa55" (UID: "94a46076-2c6c-48fe-8ec3-6b239ab5aa55"). InnerVolumeSpecName "kube-api-access-rd7k5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.222699 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94a46076-2c6c-48fe-8ec3-6b239ab5aa55" (UID: "94a46076-2c6c-48fe-8ec3-6b239ab5aa55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.266196 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-config-data" (OuterVolumeSpecName: "config-data") pod "94a46076-2c6c-48fe-8ec3-6b239ab5aa55" (UID: "94a46076-2c6c-48fe-8ec3-6b239ab5aa55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.297589 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd7k5\" (UniqueName: \"kubernetes.io/projected/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-kube-api-access-rd7k5\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.297626 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.297641 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a46076-2c6c-48fe-8ec3-6b239ab5aa55-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.671796 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-tdkwj" event={"ID":"94a46076-2c6c-48fe-8ec3-6b239ab5aa55","Type":"ContainerDied","Data":"634812aff5acaddc79fead1c3de214f0c54a57b53b0bc937b241d662c7072db0"} Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.672300 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="634812aff5acaddc79fead1c3de214f0c54a57b53b0bc937b241d662c7072db0" Dec 03 18:17:26 crc kubenswrapper[5002]: I1203 18:17:26.671860 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-tdkwj" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.849911 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-577995c5c6-vwxf6"] Dec 03 18:17:27 crc kubenswrapper[5002]: E1203 18:17:27.850814 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94a46076-2c6c-48fe-8ec3-6b239ab5aa55" containerName="heat-db-sync" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.850831 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="94a46076-2c6c-48fe-8ec3-6b239ab5aa55" containerName="heat-db-sync" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.851112 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="94a46076-2c6c-48fe-8ec3-6b239ab5aa55" containerName="heat-db-sync" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.851969 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.855342 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.855740 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.855981 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-gwzb4" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.872492 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-577995c5c6-vwxf6"] Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.905508 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-57d6578878-lnnzf" podUID="15a59bf6-beb9-43f8-b192-5f3dfe627c28" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.118:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.118:8443: connect: connection refused" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.966703 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-5c79b54d59-bnljn"] Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.987078 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:27 crc kubenswrapper[5002]: I1203 18:17:27.992388 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.035829 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data-custom\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.073870 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-combined-ca-bundle\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.074177 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkkbj\" (UniqueName: \"kubernetes.io/projected/ac39d25b-ee08-46dc-9ec0-3a6d91737197-kube-api-access-gkkbj\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.074399 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.083794 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5c79b54d59-bnljn"] Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.117815 5002 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/heat-cfnapi-746b747946-smp42"] Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.119252 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.126068 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.175977 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data-custom\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176020 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-combined-ca-bundle\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176066 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkkbj\" (UniqueName: \"kubernetes.io/projected/ac39d25b-ee08-46dc-9ec0-3a6d91737197-kube-api-access-gkkbj\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176132 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-combined-ca-bundle\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176159 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176178 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data-custom\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176205 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176223 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-combined-ca-bundle\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " 
pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176253 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176274 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data-custom\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176295 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btb5c\" (UniqueName: \"kubernetes.io/projected/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-kube-api-access-btb5c\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.176316 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8lxz\" (UniqueName: \"kubernetes.io/projected/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-kube-api-access-f8lxz\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.182555 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-combined-ca-bundle\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.182591 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data-custom\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.182909 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.197317 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-746b747946-smp42"] Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.198916 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkkbj\" (UniqueName: \"kubernetes.io/projected/ac39d25b-ee08-46dc-9ec0-3a6d91737197-kube-api-access-gkkbj\") pod \"heat-engine-577995c5c6-vwxf6\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.238219 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-ph8lh"] Dec 03 
18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.262703 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-ph8lh"] Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-combined-ca-bundle\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280571 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280596 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data-custom\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280623 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-combined-ca-bundle\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280656 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280677 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data-custom\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280697 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btb5c\" (UniqueName: \"kubernetes.io/projected/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-kube-api-access-btb5c\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.280718 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8lxz\" (UniqueName: \"kubernetes.io/projected/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-kube-api-access-f8lxz\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.286384 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-combined-ca-bundle\") pod 
\"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.286692 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-combined-ca-bundle\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.288840 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.292551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data-custom\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.292655 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data-custom\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.293142 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.300524 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btb5c\" (UniqueName: \"kubernetes.io/projected/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-kube-api-access-btb5c\") pod \"heat-cfnapi-746b747946-smp42\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.304826 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8lxz\" (UniqueName: \"kubernetes.io/projected/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-kube-api-access-f8lxz\") pod \"heat-api-5c79b54d59-bnljn\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.336424 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.462614 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.495194 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.853788 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c437a30-65ce-4e57-9091-7d670bd45e54" path="/var/lib/kubelet/pods/5c437a30-65ce-4e57-9091-7d670bd45e54/volumes" Dec 03 18:17:28 crc kubenswrapper[5002]: I1203 18:17:28.871395 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5c79b54d59-bnljn"] Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.011336 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-746b747946-smp42"] Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.203132 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-577995c5c6-vwxf6"] Dec 03 18:17:29 crc kubenswrapper[5002]: W1203 18:17:29.212048 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac39d25b_ee08_46dc_9ec0_3a6d91737197.slice/crio-38c3a5d838355dbc1283c9b4d6b91178e773334544e27c2a82fe155f89bc5af0 WatchSource:0}: Error finding container 38c3a5d838355dbc1283c9b4d6b91178e773334544e27c2a82fe155f89bc5af0: Status 404 returned error can't find the container with id 38c3a5d838355dbc1283c9b4d6b91178e773334544e27c2a82fe155f89bc5af0 Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.712760 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-746b747946-smp42" event={"ID":"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7","Type":"ContainerStarted","Data":"6d964f1a00d1d16553881c7e21416e5199764d10cfcb3dd1afcf45640a7120d7"} Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.716029 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5c79b54d59-bnljn" event={"ID":"c4fa7e7d-0739-47ea-99ba-b157cc3b453f","Type":"ContainerStarted","Data":"304b85b41f1f05658803c4fec3d0b8e9dcba396636e89e0f97ee52ac32032b4a"} Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.719144 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-577995c5c6-vwxf6" event={"ID":"ac39d25b-ee08-46dc-9ec0-3a6d91737197","Type":"ContainerStarted","Data":"c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf"} Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.719171 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-577995c5c6-vwxf6" event={"ID":"ac39d25b-ee08-46dc-9ec0-3a6d91737197","Type":"ContainerStarted","Data":"38c3a5d838355dbc1283c9b4d6b91178e773334544e27c2a82fe155f89bc5af0"} Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.719774 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:29 crc kubenswrapper[5002]: I1203 18:17:29.736560 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-577995c5c6-vwxf6" podStartSLOduration=2.736533765 podStartE2EDuration="2.736533765s" podCreationTimestamp="2025-12-03 18:17:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:29.731691505 +0000 UTC m=+6373.145513403" watchObservedRunningTime="2025-12-03 18:17:29.736533765 +0000 UTC m=+6373.150355653" Dec 03 18:17:31 crc kubenswrapper[5002]: I1203 18:17:31.742686 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5c79b54d59-bnljn" 
event={"ID":"c4fa7e7d-0739-47ea-99ba-b157cc3b453f","Type":"ContainerStarted","Data":"9d17080e8f60d6145574b3b83fc0f624eb70bec9933197183c3858887f069514"} Dec 03 18:17:31 crc kubenswrapper[5002]: I1203 18:17:31.743309 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:31 crc kubenswrapper[5002]: I1203 18:17:31.745431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-746b747946-smp42" event={"ID":"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7","Type":"ContainerStarted","Data":"4dd404f7a9f7de18e72828f49c0a5e231a7d569d2398505f3ed005219f75e892"} Dec 03 18:17:31 crc kubenswrapper[5002]: I1203 18:17:31.745588 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:31 crc kubenswrapper[5002]: I1203 18:17:31.761603 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-5c79b54d59-bnljn" podStartSLOduration=2.3017165520000002 podStartE2EDuration="4.761583059s" podCreationTimestamp="2025-12-03 18:17:27 +0000 UTC" firstStartedPulling="2025-12-03 18:17:28.877988887 +0000 UTC m=+6372.291810775" lastFinishedPulling="2025-12-03 18:17:31.337855394 +0000 UTC m=+6374.751677282" observedRunningTime="2025-12-03 18:17:31.760318135 +0000 UTC m=+6375.174140023" watchObservedRunningTime="2025-12-03 18:17:31.761583059 +0000 UTC m=+6375.175404947" Dec 03 18:17:31 crc kubenswrapper[5002]: I1203 18:17:31.776789 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-746b747946-smp42" podStartSLOduration=2.460822168 podStartE2EDuration="4.776774388s" podCreationTimestamp="2025-12-03 18:17:27 +0000 UTC" firstStartedPulling="2025-12-03 18:17:29.016426456 +0000 UTC m=+6372.430248344" lastFinishedPulling="2025-12-03 18:17:31.332378676 +0000 UTC m=+6374.746200564" observedRunningTime="2025-12-03 18:17:31.774867697 +0000 UTC m=+6375.188689595" watchObservedRunningTime="2025-12-03 18:17:31.776774388 +0000 UTC m=+6375.190596266" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.012652 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-855bcc4775-7nbfz"] Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.014356 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.036256 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-855bcc4775-7nbfz"] Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.064780 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-7fd779bdd6-7q2dt"] Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.066132 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.080505 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-b989cf878-ltc5j"] Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.081972 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.095426 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7fd779bdd6-7q2dt"] Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.106392 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-b989cf878-ltc5j"] Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.165672 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-combined-ca-bundle\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.165765 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-config-data\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.165829 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data-custom\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.165966 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-combined-ca-bundle\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.165991 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hldqm\" (UniqueName: \"kubernetes.io/projected/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-kube-api-access-hldqm\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166025 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data-custom\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166054 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5tjm\" (UniqueName: \"kubernetes.io/projected/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-kube-api-access-n5tjm\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166102 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-combined-ca-bundle\") pod 
\"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166150 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9n6p\" (UniqueName: \"kubernetes.io/projected/afb3f81d-a644-4291-964f-e467b85c77fa-kube-api-access-s9n6p\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166245 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166275 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-config-data-custom\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.166291 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.267975 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9n6p\" (UniqueName: \"kubernetes.io/projected/afb3f81d-a644-4291-964f-e467b85c77fa-kube-api-access-s9n6p\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268055 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268079 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-config-data-custom\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268094 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268129 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-combined-ca-bundle\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268156 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-config-data\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268182 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data-custom\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268230 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-combined-ca-bundle\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.268248 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hldqm\" (UniqueName: \"kubernetes.io/projected/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-kube-api-access-hldqm\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.269482 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data-custom\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.269544 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5tjm\" (UniqueName: \"kubernetes.io/projected/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-kube-api-access-n5tjm\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.269594 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-combined-ca-bundle\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.275089 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-config-data\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.275442 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-combined-ca-bundle\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.276398 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data-custom\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.277031 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-config-data-custom\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.277109 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data-custom\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.277540 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.284575 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.286310 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-combined-ca-bundle\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.288647 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-combined-ca-bundle\") pod \"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.295297 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9n6p\" (UniqueName: \"kubernetes.io/projected/afb3f81d-a644-4291-964f-e467b85c77fa-kube-api-access-s9n6p\") pod \"heat-cfnapi-7fd779bdd6-7q2dt\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.298995 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hldqm\" (UniqueName: \"kubernetes.io/projected/ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd-kube-api-access-hldqm\") pod 
\"heat-engine-855bcc4775-7nbfz\" (UID: \"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd\") " pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.305343 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5tjm\" (UniqueName: \"kubernetes.io/projected/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-kube-api-access-n5tjm\") pod \"heat-api-b989cf878-ltc5j\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.385627 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.402964 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.415898 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:35 crc kubenswrapper[5002]: I1203 18:17:35.888128 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-855bcc4775-7nbfz"] Dec 03 18:17:35 crc kubenswrapper[5002]: W1203 18:17:35.892330 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac58b8f9_9adc_47b2_a4bf_3023e9dc67bd.slice/crio-5d7c527ca0981b4cb64082f9c4c40e5fba34a3f1dddf6f870a71a853cfc92f9c WatchSource:0}: Error finding container 5d7c527ca0981b4cb64082f9c4c40e5fba34a3f1dddf6f870a71a853cfc92f9c: Status 404 returned error can't find the container with id 5d7c527ca0981b4cb64082f9c4c40e5fba34a3f1dddf6f870a71a853cfc92f9c Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.024629 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7fd779bdd6-7q2dt"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.037420 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-b989cf878-ltc5j"] Dec 03 18:17:36 crc kubenswrapper[5002]: W1203 18:17:36.048958 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafb3f81d_a644_4291_964f_e467b85c77fa.slice/crio-81df689061faa5e41b5bdd5674f0e24809a4d5c65360b0baff8726c50bad7d01 WatchSource:0}: Error finding container 81df689061faa5e41b5bdd5674f0e24809a4d5c65360b0baff8726c50bad7d01: Status 404 returned error can't find the container with id 81df689061faa5e41b5bdd5674f0e24809a4d5c65360b0baff8726c50bad7d01 Dec 03 18:17:36 crc kubenswrapper[5002]: W1203 18:17:36.049239 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61d4d6c4_a7ae_4c2f_bc5a_a31d45680b9d.slice/crio-4e4a195f87b1d56a75134a85853fe7eea3aed61b6f74baec8bfa7fb89b1860bc WatchSource:0}: Error finding container 4e4a195f87b1d56a75134a85853fe7eea3aed61b6f74baec8bfa7fb89b1860bc: Status 404 returned error can't find the container with id 4e4a195f87b1d56a75134a85853fe7eea3aed61b6f74baec8bfa7fb89b1860bc Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.152066 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-746b747946-smp42"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.152329 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-746b747946-smp42" 
podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerName="heat-cfnapi" containerID="cri-o://4dd404f7a9f7de18e72828f49c0a5e231a7d569d2398505f3ed005219f75e892" gracePeriod=60 Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.165000 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-cfnapi-746b747946-smp42" podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerName="heat-cfnapi" probeResult="failure" output="Get \"http://10.217.1.124:8000/healthcheck\": EOF" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.168720 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-5c79b54d59-bnljn"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.168935 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-5c79b54d59-bnljn" podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerName="heat-api" containerID="cri-o://9d17080e8f60d6145574b3b83fc0f624eb70bec9933197183c3858887f069514" gracePeriod=60 Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.193487 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-ff4576f96-gqvql"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.194883 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.206848 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-public-svc" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.207222 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-internal-svc" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.209295 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-api-5c79b54d59-bnljn" podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerName="heat-api" probeResult="failure" output="Get \"http://10.217.1.123:8004/healthcheck\": EOF" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.272364 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-7bf9f874bd-lg5mg"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.274455 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.277090 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-internal-svc" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.280196 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-public-svc" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.285800 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-ff4576f96-gqvql"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.288007 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-config-data\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.288202 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-public-tls-certs\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.288336 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-config-data-custom\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.288394 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-269ln\" (UniqueName: \"kubernetes.io/projected/144a5d00-65c6-445a-ad71-2de503e96a0c-kube-api-access-269ln\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.288472 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-combined-ca-bundle\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.288541 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-internal-tls-certs\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.294720 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-7bf9f874bd-lg5mg"] Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.390722 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-internal-tls-certs\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " 
pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.390833 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-config-data\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.390983 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-config-data\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391031 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-config-data-custom\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391186 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-public-tls-certs\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391254 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-public-tls-certs\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391296 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-combined-ca-bundle\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391357 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-config-data-custom\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391419 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-269ln\" (UniqueName: \"kubernetes.io/projected/144a5d00-65c6-445a-ad71-2de503e96a0c-kube-api-access-269ln\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391525 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-combined-ca-bundle\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: 
\"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391592 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcp5g\" (UniqueName: \"kubernetes.io/projected/c59ef7e0-b477-4e81-abf0-ac5212a5c546-kube-api-access-mcp5g\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.391635 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-internal-tls-certs\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.395791 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-public-tls-certs\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.398050 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-config-data-custom\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.399233 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-config-data\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.404250 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-internal-tls-certs\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.406348 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/144a5d00-65c6-445a-ad71-2de503e96a0c-combined-ca-bundle\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.413412 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-269ln\" (UniqueName: \"kubernetes.io/projected/144a5d00-65c6-445a-ad71-2de503e96a0c-kube-api-access-269ln\") pod \"heat-cfnapi-ff4576f96-gqvql\" (UID: \"144a5d00-65c6-445a-ad71-2de503e96a0c\") " pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.495536 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcp5g\" (UniqueName: \"kubernetes.io/projected/c59ef7e0-b477-4e81-abf0-ac5212a5c546-kube-api-access-mcp5g\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " 
pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.495782 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-internal-tls-certs\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.495930 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-config-data\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.496005 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-config-data-custom\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.496135 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-public-tls-certs\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.496214 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-combined-ca-bundle\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.501663 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-internal-tls-certs\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.501731 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-combined-ca-bundle\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.502530 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-config-data-custom\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.502949 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-config-data\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.502994 5002 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59ef7e0-b477-4e81-abf0-ac5212a5c546-public-tls-certs\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.517786 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcp5g\" (UniqueName: \"kubernetes.io/projected/c59ef7e0-b477-4e81-abf0-ac5212a5c546-kube-api-access-mcp5g\") pod \"heat-api-7bf9f874bd-lg5mg\" (UID: \"c59ef7e0-b477-4e81-abf0-ac5212a5c546\") " pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.576885 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.628951 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.820429 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-b989cf878-ltc5j" event={"ID":"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d","Type":"ContainerStarted","Data":"afaf20a907b2723282de88594298ae4599901116d3cad358095a685022ac254a"} Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.820771 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-b989cf878-ltc5j" event={"ID":"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d","Type":"ContainerStarted","Data":"4e4a195f87b1d56a75134a85853fe7eea3aed61b6f74baec8bfa7fb89b1860bc"} Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.821461 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.828407 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" event={"ID":"afb3f81d-a644-4291-964f-e467b85c77fa","Type":"ContainerStarted","Data":"2dc6b0a220bfd8ef4889fb3dc770d33064b3b15b6f3557ed41851e95bc23bd64"} Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.829151 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" event={"ID":"afb3f81d-a644-4291-964f-e467b85c77fa","Type":"ContainerStarted","Data":"81df689061faa5e41b5bdd5674f0e24809a4d5c65360b0baff8726c50bad7d01"} Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.829972 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.830983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-855bcc4775-7nbfz" event={"ID":"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd","Type":"ContainerStarted","Data":"c4fdd9bb7ac0b223211c0839ecf55bd844a66ec97e2026a89830bd3aa48ece1a"} Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.831025 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-855bcc4775-7nbfz" event={"ID":"ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd","Type":"ContainerStarted","Data":"5d7c527ca0981b4cb64082f9c4c40e5fba34a3f1dddf6f870a71a853cfc92f9c"} Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.832065 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.856401 5002 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-b989cf878-ltc5j" podStartSLOduration=1.856381369 podStartE2EDuration="1.856381369s" podCreationTimestamp="2025-12-03 18:17:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:36.847697805 +0000 UTC m=+6380.261519693" watchObservedRunningTime="2025-12-03 18:17:36.856381369 +0000 UTC m=+6380.270203257" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.896143 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-855bcc4775-7nbfz" podStartSLOduration=2.89612593 podStartE2EDuration="2.89612593s" podCreationTimestamp="2025-12-03 18:17:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:36.881614389 +0000 UTC m=+6380.295436277" watchObservedRunningTime="2025-12-03 18:17:36.89612593 +0000 UTC m=+6380.309947818" Dec 03 18:17:36 crc kubenswrapper[5002]: I1203 18:17:36.921638 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" podStartSLOduration=1.921618437 podStartE2EDuration="1.921618437s" podCreationTimestamp="2025-12-03 18:17:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:36.919814449 +0000 UTC m=+6380.333636337" watchObservedRunningTime="2025-12-03 18:17:36.921618437 +0000 UTC m=+6380.335440325" Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.360537 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-ff4576f96-gqvql"] Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.380275 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-7bf9f874bd-lg5mg"] Dec 03 18:17:37 crc kubenswrapper[5002]: W1203 18:17:37.398231 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc59ef7e0_b477_4e81_abf0_ac5212a5c546.slice/crio-0d3eb5bef56ef4d25d1105482a27450766b0fb321405bd6525e42f090c6b61c6 WatchSource:0}: Error finding container 0d3eb5bef56ef4d25d1105482a27450766b0fb321405bd6525e42f090c6b61c6: Status 404 returned error can't find the container with id 0d3eb5bef56ef4d25d1105482a27450766b0fb321405bd6525e42f090c6b61c6 Dec 03 18:17:37 crc kubenswrapper[5002]: W1203 18:17:37.401800 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod144a5d00_65c6_445a_ad71_2de503e96a0c.slice/crio-1b1a59c410f7eb4b391027a6ee42853c84e21f1209f2d7f1b504245833763bce WatchSource:0}: Error finding container 1b1a59c410f7eb4b391027a6ee42853c84e21f1209f2d7f1b504245833763bce: Status 404 returned error can't find the container with id 1b1a59c410f7eb4b391027a6ee42853c84e21f1209f2d7f1b504245833763bce Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.866394 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-ff4576f96-gqvql" event={"ID":"144a5d00-65c6-445a-ad71-2de503e96a0c","Type":"ContainerStarted","Data":"1b1a59c410f7eb4b391027a6ee42853c84e21f1209f2d7f1b504245833763bce"} Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.869830 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-7bf9f874bd-lg5mg" 
event={"ID":"c59ef7e0-b477-4e81-abf0-ac5212a5c546","Type":"ContainerStarted","Data":"0d3eb5bef56ef4d25d1105482a27450766b0fb321405bd6525e42f090c6b61c6"} Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.871389 5002 generic.go:334] "Generic (PLEG): container finished" podID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerID="afaf20a907b2723282de88594298ae4599901116d3cad358095a685022ac254a" exitCode=1 Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.871456 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-b989cf878-ltc5j" event={"ID":"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d","Type":"ContainerDied","Data":"afaf20a907b2723282de88594298ae4599901116d3cad358095a685022ac254a"} Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.872067 5002 scope.go:117] "RemoveContainer" containerID="afaf20a907b2723282de88594298ae4599901116d3cad358095a685022ac254a" Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.873291 5002 generic.go:334] "Generic (PLEG): container finished" podID="afb3f81d-a644-4291-964f-e467b85c77fa" containerID="2dc6b0a220bfd8ef4889fb3dc770d33064b3b15b6f3557ed41851e95bc23bd64" exitCode=1 Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.873619 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" event={"ID":"afb3f81d-a644-4291-964f-e467b85c77fa","Type":"ContainerDied","Data":"2dc6b0a220bfd8ef4889fb3dc770d33064b3b15b6f3557ed41851e95bc23bd64"} Dec 03 18:17:37 crc kubenswrapper[5002]: I1203 18:17:37.874340 5002 scope.go:117] "RemoveContainer" containerID="2dc6b0a220bfd8ef4889fb3dc770d33064b3b15b6f3557ed41851e95bc23bd64" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.885514 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-7bf9f874bd-lg5mg" event={"ID":"c59ef7e0-b477-4e81-abf0-ac5212a5c546","Type":"ContainerStarted","Data":"e6e1e6aaf51c236e05587b431cd9f6795bea3877466a3264c25a6d7f95d58435"} Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.885929 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.887878 5002 generic.go:334] "Generic (PLEG): container finished" podID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerID="e2587b1f20480e3b6209cfaa23e951604707bd9e41681c1d570b4c666db3deac" exitCode=1 Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.887957 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-b989cf878-ltc5j" event={"ID":"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d","Type":"ContainerDied","Data":"e2587b1f20480e3b6209cfaa23e951604707bd9e41681c1d570b4c666db3deac"} Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.887993 5002 scope.go:117] "RemoveContainer" containerID="afaf20a907b2723282de88594298ae4599901116d3cad358095a685022ac254a" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.888660 5002 scope.go:117] "RemoveContainer" containerID="e2587b1f20480e3b6209cfaa23e951604707bd9e41681c1d570b4c666db3deac" Dec 03 18:17:38 crc kubenswrapper[5002]: E1203 18:17:38.888974 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-b989cf878-ltc5j_openstack(61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d)\"" pod="openstack/heat-api-b989cf878-ltc5j" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.889988 5002 generic.go:334] "Generic 
(PLEG): container finished" podID="afb3f81d-a644-4291-964f-e467b85c77fa" containerID="549d948e2d2d1f61ed9c97c2f469d7edd40b9863d47fe115c53382b67d77dc2c" exitCode=1 Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.890038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" event={"ID":"afb3f81d-a644-4291-964f-e467b85c77fa","Type":"ContainerDied","Data":"549d948e2d2d1f61ed9c97c2f469d7edd40b9863d47fe115c53382b67d77dc2c"} Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.890442 5002 scope.go:117] "RemoveContainer" containerID="549d948e2d2d1f61ed9c97c2f469d7edd40b9863d47fe115c53382b67d77dc2c" Dec 03 18:17:38 crc kubenswrapper[5002]: E1203 18:17:38.890712 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-7fd779bdd6-7q2dt_openstack(afb3f81d-a644-4291-964f-e467b85c77fa)\"" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.892258 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-ff4576f96-gqvql" event={"ID":"144a5d00-65c6-445a-ad71-2de503e96a0c","Type":"ContainerStarted","Data":"775946caadebcaf42df7d8328d985c2f85997a895a6b94bbccf42291d883ae46"} Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.892915 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.927684 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-7bf9f874bd-lg5mg" podStartSLOduration=2.927660309 podStartE2EDuration="2.927660309s" podCreationTimestamp="2025-12-03 18:17:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:38.914521394 +0000 UTC m=+6382.328343282" watchObservedRunningTime="2025-12-03 18:17:38.927660309 +0000 UTC m=+6382.341482197" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.940927 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-ff4576f96-gqvql" podStartSLOduration=2.940907395 podStartE2EDuration="2.940907395s" podCreationTimestamp="2025-12-03 18:17:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:17:38.934217355 +0000 UTC m=+6382.348039243" watchObservedRunningTime="2025-12-03 18:17:38.940907395 +0000 UTC m=+6382.354729283" Dec 03 18:17:38 crc kubenswrapper[5002]: I1203 18:17:38.978933 5002 scope.go:117] "RemoveContainer" containerID="2dc6b0a220bfd8ef4889fb3dc770d33064b3b15b6f3557ed41851e95bc23bd64" Dec 03 18:17:39 crc kubenswrapper[5002]: I1203 18:17:39.908229 5002 scope.go:117] "RemoveContainer" containerID="e2587b1f20480e3b6209cfaa23e951604707bd9e41681c1d570b4c666db3deac" Dec 03 18:17:39 crc kubenswrapper[5002]: E1203 18:17:39.908510 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-b989cf878-ltc5j_openstack(61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d)\"" pod="openstack/heat-api-b989cf878-ltc5j" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" Dec 03 18:17:39 crc kubenswrapper[5002]: I1203 18:17:39.911849 5002 
scope.go:117] "RemoveContainer" containerID="549d948e2d2d1f61ed9c97c2f469d7edd40b9863d47fe115c53382b67d77dc2c" Dec 03 18:17:39 crc kubenswrapper[5002]: E1203 18:17:39.912030 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-7fd779bdd6-7q2dt_openstack(afb3f81d-a644-4291-964f-e467b85c77fa)\"" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.397419 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.403901 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.403966 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.416459 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.416519 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.920490 5002 scope.go:117] "RemoveContainer" containerID="549d948e2d2d1f61ed9c97c2f469d7edd40b9863d47fe115c53382b67d77dc2c" Dec 03 18:17:40 crc kubenswrapper[5002]: I1203 18:17:40.920556 5002 scope.go:117] "RemoveContainer" containerID="e2587b1f20480e3b6209cfaa23e951604707bd9e41681c1d570b4c666db3deac" Dec 03 18:17:40 crc kubenswrapper[5002]: E1203 18:17:40.920788 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-b989cf878-ltc5j_openstack(61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d)\"" pod="openstack/heat-api-b989cf878-ltc5j" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" Dec 03 18:17:40 crc kubenswrapper[5002]: E1203 18:17:40.920818 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-7fd779bdd6-7q2dt_openstack(afb3f81d-a644-4291-964f-e467b85c77fa)\"" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" Dec 03 18:17:41 crc kubenswrapper[5002]: I1203 18:17:41.629351 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-api-5c79b54d59-bnljn" podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerName="heat-api" probeResult="failure" output="Get \"http://10.217.1.123:8004/healthcheck\": read tcp 10.217.0.2:55946->10.217.1.123:8004: read: connection reset by peer" Dec 03 18:17:41 crc kubenswrapper[5002]: I1203 18:17:41.933397 5002 generic.go:334] "Generic (PLEG): container finished" podID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerID="9d17080e8f60d6145574b3b83fc0f624eb70bec9933197183c3858887f069514" exitCode=0 Dec 03 18:17:41 crc kubenswrapper[5002]: I1203 18:17:41.933440 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5c79b54d59-bnljn" 
event={"ID":"c4fa7e7d-0739-47ea-99ba-b157cc3b453f","Type":"ContainerDied","Data":"9d17080e8f60d6145574b3b83fc0f624eb70bec9933197183c3858887f069514"} Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.131140 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.260683 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data\") pod \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.260905 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data-custom\") pod \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.260936 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8lxz\" (UniqueName: \"kubernetes.io/projected/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-kube-api-access-f8lxz\") pod \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.261034 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-combined-ca-bundle\") pod \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\" (UID: \"c4fa7e7d-0739-47ea-99ba-b157cc3b453f\") " Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.270021 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c4fa7e7d-0739-47ea-99ba-b157cc3b453f" (UID: "c4fa7e7d-0739-47ea-99ba-b157cc3b453f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.270116 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-kube-api-access-f8lxz" (OuterVolumeSpecName: "kube-api-access-f8lxz") pod "c4fa7e7d-0739-47ea-99ba-b157cc3b453f" (UID: "c4fa7e7d-0739-47ea-99ba-b157cc3b453f"). InnerVolumeSpecName "kube-api-access-f8lxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.321069 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4fa7e7d-0739-47ea-99ba-b157cc3b453f" (UID: "c4fa7e7d-0739-47ea-99ba-b157cc3b453f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.341965 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data" (OuterVolumeSpecName: "config-data") pod "c4fa7e7d-0739-47ea-99ba-b157cc3b453f" (UID: "c4fa7e7d-0739-47ea-99ba-b157cc3b453f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.364541 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.364605 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8lxz\" (UniqueName: \"kubernetes.io/projected/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-kube-api-access-f8lxz\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.364625 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.364643 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4fa7e7d-0739-47ea-99ba-b157cc3b453f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.484887 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-57d6578878-lnnzf" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.587376 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-54665f9df8-dr6n7"] Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.588309 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-54665f9df8-dr6n7" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon-log" containerID="cri-o://8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43" gracePeriod=30 Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.588474 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-54665f9df8-dr6n7" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" containerID="cri-o://bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266" gracePeriod=30 Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.616630 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-cfnapi-746b747946-smp42" podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerName="heat-cfnapi" probeResult="failure" output="Get \"http://10.217.1.124:8000/healthcheck\": read tcp 10.217.0.2:52584->10.217.1.124:8000: read: connection reset by peer" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.947989 5002 generic.go:334] "Generic (PLEG): container finished" podID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerID="4dd404f7a9f7de18e72828f49c0a5e231a7d569d2398505f3ed005219f75e892" exitCode=0 Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.948301 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-746b747946-smp42" event={"ID":"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7","Type":"ContainerDied","Data":"4dd404f7a9f7de18e72828f49c0a5e231a7d569d2398505f3ed005219f75e892"} Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.951169 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5c79b54d59-bnljn" event={"ID":"c4fa7e7d-0739-47ea-99ba-b157cc3b453f","Type":"ContainerDied","Data":"304b85b41f1f05658803c4fec3d0b8e9dcba396636e89e0f97ee52ac32032b4a"} Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.951233 5002 scope.go:117] "RemoveContainer" 
containerID="9d17080e8f60d6145574b3b83fc0f624eb70bec9933197183c3858887f069514" Dec 03 18:17:42 crc kubenswrapper[5002]: I1203 18:17:42.951262 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5c79b54d59-bnljn" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.061317 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.084891 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-5c79b54d59-bnljn"] Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.094467 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-5c79b54d59-bnljn"] Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.185847 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data\") pod \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.186045 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btb5c\" (UniqueName: \"kubernetes.io/projected/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-kube-api-access-btb5c\") pod \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.186174 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data-custom\") pod \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.186198 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-combined-ca-bundle\") pod \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\" (UID: \"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7\") " Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.194403 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-kube-api-access-btb5c" (OuterVolumeSpecName: "kube-api-access-btb5c") pod "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" (UID: "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7"). InnerVolumeSpecName "kube-api-access-btb5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.205554 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" (UID: "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.234656 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" (UID: "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.259172 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data" (OuterVolumeSpecName: "config-data") pod "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" (UID: "18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.288380 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btb5c\" (UniqueName: \"kubernetes.io/projected/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-kube-api-access-btb5c\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.288419 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.288430 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.288440 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.982888 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-746b747946-smp42" event={"ID":"18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7","Type":"ContainerDied","Data":"6d964f1a00d1d16553881c7e21416e5199764d10cfcb3dd1afcf45640a7120d7"} Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.982952 5002 scope.go:117] "RemoveContainer" containerID="4dd404f7a9f7de18e72828f49c0a5e231a7d569d2398505f3ed005219f75e892" Dec 03 18:17:43 crc kubenswrapper[5002]: I1203 18:17:43.982965 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-746b747946-smp42" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.036455 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-746b747946-smp42"] Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.046282 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-746b747946-smp42"] Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.437091 5002 scope.go:117] "RemoveContainer" containerID="a637488e4973b57fca4db7fe782eb9c62fb70f159d75cbdb21e2124d3c8b10fd" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.464913 5002 scope.go:117] "RemoveContainer" containerID="b382000f85dc3f39894b6a9e980dadc0fd5e81c338831e357bb5f45bbdaf37cf" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.534837 5002 scope.go:117] "RemoveContainer" containerID="de5a4bbe1a6a67967de86ec2b7070676b671e9d6acbd35930c0a654261bbf5f4" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.578491 5002 scope.go:117] "RemoveContainer" containerID="d52032ffb3eaf234e6896c2d91456e29f08121b634d471eee11529936ca1f206" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.629670 5002 scope.go:117] "RemoveContainer" containerID="ae845aadb52a499aca93f0eb1c28ccb744ad47f7a7bc6df9bffda4870f0e58f3" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.870164 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" path="/var/lib/kubelet/pods/18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7/volumes" Dec 03 18:17:44 crc kubenswrapper[5002]: I1203 18:17:44.870972 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" path="/var/lib/kubelet/pods/c4fa7e7d-0739-47ea-99ba-b157cc3b453f/volumes" Dec 03 18:17:47 crc kubenswrapper[5002]: I1203 18:17:47.017201 5002 generic.go:334] "Generic (PLEG): container finished" podID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerID="bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266" exitCode=0 Dec 03 18:17:47 crc kubenswrapper[5002]: I1203 18:17:47.017242 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54665f9df8-dr6n7" event={"ID":"6eb582c6-bdc8-4289-8f44-ee5d3699053d","Type":"ContainerDied","Data":"bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266"} Dec 03 18:17:47 crc kubenswrapper[5002]: I1203 18:17:47.908615 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-ff4576f96-gqvql" Dec 03 18:17:47 crc kubenswrapper[5002]: I1203 18:17:47.975524 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-7fd779bdd6-7q2dt"] Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.003334 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-7bf9f874bd-lg5mg" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.086277 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-b989cf878-ltc5j"] Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.524226 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.536235 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.543145 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621096 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data\") pod \"afb3f81d-a644-4291-964f-e467b85c77fa\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621141 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5tjm\" (UniqueName: \"kubernetes.io/projected/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-kube-api-access-n5tjm\") pod \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621223 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data\") pod \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621290 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9n6p\" (UniqueName: \"kubernetes.io/projected/afb3f81d-a644-4291-964f-e467b85c77fa-kube-api-access-s9n6p\") pod \"afb3f81d-a644-4291-964f-e467b85c77fa\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621340 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data-custom\") pod \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621399 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data-custom\") pod \"afb3f81d-a644-4291-964f-e467b85c77fa\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621476 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-combined-ca-bundle\") pod \"afb3f81d-a644-4291-964f-e467b85c77fa\" (UID: \"afb3f81d-a644-4291-964f-e467b85c77fa\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.621524 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-combined-ca-bundle\") pod \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\" (UID: \"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d\") " Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.633362 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afb3f81d-a644-4291-964f-e467b85c77fa-kube-api-access-s9n6p" (OuterVolumeSpecName: "kube-api-access-s9n6p") pod "afb3f81d-a644-4291-964f-e467b85c77fa" (UID: "afb3f81d-a644-4291-964f-e467b85c77fa"). InnerVolumeSpecName "kube-api-access-s9n6p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.634267 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "afb3f81d-a644-4291-964f-e467b85c77fa" (UID: "afb3f81d-a644-4291-964f-e467b85c77fa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.637950 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-kube-api-access-n5tjm" (OuterVolumeSpecName: "kube-api-access-n5tjm") pod "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" (UID: "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d"). InnerVolumeSpecName "kube-api-access-n5tjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.646496 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" (UID: "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.662535 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" (UID: "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.665461 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afb3f81d-a644-4291-964f-e467b85c77fa" (UID: "afb3f81d-a644-4291-964f-e467b85c77fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.692521 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data" (OuterVolumeSpecName: "config-data") pod "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" (UID: "61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.704306 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data" (OuterVolumeSpecName: "config-data") pod "afb3f81d-a644-4291-964f-e467b85c77fa" (UID: "afb3f81d-a644-4291-964f-e467b85c77fa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724552 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724594 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5tjm\" (UniqueName: \"kubernetes.io/projected/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-kube-api-access-n5tjm\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724610 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724621 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9n6p\" (UniqueName: \"kubernetes.io/projected/afb3f81d-a644-4291-964f-e467b85c77fa-kube-api-access-s9n6p\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724631 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724642 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724652 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb3f81d-a644-4291-964f-e467b85c77fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:48 crc kubenswrapper[5002]: I1203 18:17:48.724662 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.038728 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-b989cf878-ltc5j" Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.038730 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-b989cf878-ltc5j" event={"ID":"61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d","Type":"ContainerDied","Data":"4e4a195f87b1d56a75134a85853fe7eea3aed61b6f74baec8bfa7fb89b1860bc"} Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.039180 5002 scope.go:117] "RemoveContainer" containerID="e2587b1f20480e3b6209cfaa23e951604707bd9e41681c1d570b4c666db3deac" Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.042044 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" event={"ID":"afb3f81d-a644-4291-964f-e467b85c77fa","Type":"ContainerDied","Data":"81df689061faa5e41b5bdd5674f0e24809a4d5c65360b0baff8726c50bad7d01"} Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.042324 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7fd779bdd6-7q2dt" Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.069771 5002 scope.go:117] "RemoveContainer" containerID="549d948e2d2d1f61ed9c97c2f469d7edd40b9863d47fe115c53382b67d77dc2c" Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.072669 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-b989cf878-ltc5j"] Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.087604 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-b989cf878-ltc5j"] Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.102648 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-7fd779bdd6-7q2dt"] Dec 03 18:17:49 crc kubenswrapper[5002]: I1203 18:17:49.113234 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-7fd779bdd6-7q2dt"] Dec 03 18:17:50 crc kubenswrapper[5002]: I1203 18:17:50.864490 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" path="/var/lib/kubelet/pods/61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d/volumes" Dec 03 18:17:50 crc kubenswrapper[5002]: I1203 18:17:50.865588 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" path="/var/lib/kubelet/pods/afb3f81d-a644-4291-964f-e467b85c77fa/volumes" Dec 03 18:17:51 crc kubenswrapper[5002]: I1203 18:17:51.854316 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-54665f9df8-dr6n7" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.114:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8443: connect: connection refused" Dec 03 18:17:55 crc kubenswrapper[5002]: I1203 18:17:55.414400 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-855bcc4775-7nbfz" Dec 03 18:17:55 crc kubenswrapper[5002]: I1203 18:17:55.475470 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-577995c5c6-vwxf6"] Dec 03 18:17:55 crc kubenswrapper[5002]: I1203 18:17:55.475666 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-577995c5c6-vwxf6" podUID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" containerName="heat-engine" containerID="cri-o://c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" gracePeriod=60 Dec 03 18:17:58 crc kubenswrapper[5002]: E1203 18:17:58.497536 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 03 18:17:58 crc kubenswrapper[5002]: E1203 18:17:58.499381 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 03 18:17:58 crc kubenswrapper[5002]: E1203 18:17:58.502826 5002 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 03 18:17:58 crc kubenswrapper[5002]: E1203 18:17:58.502857 5002 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-577995c5c6-vwxf6" podUID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" containerName="heat-engine" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.690707 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.809767 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data\") pod \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.809941 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-combined-ca-bundle\") pod \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.810072 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data-custom\") pod \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.810152 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkkbj\" (UniqueName: \"kubernetes.io/projected/ac39d25b-ee08-46dc-9ec0-3a6d91737197-kube-api-access-gkkbj\") pod \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\" (UID: \"ac39d25b-ee08-46dc-9ec0-3a6d91737197\") " Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.816035 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac39d25b-ee08-46dc-9ec0-3a6d91737197-kube-api-access-gkkbj" (OuterVolumeSpecName: "kube-api-access-gkkbj") pod "ac39d25b-ee08-46dc-9ec0-3a6d91737197" (UID: "ac39d25b-ee08-46dc-9ec0-3a6d91737197"). InnerVolumeSpecName "kube-api-access-gkkbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.818887 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ac39d25b-ee08-46dc-9ec0-3a6d91737197" (UID: "ac39d25b-ee08-46dc-9ec0-3a6d91737197"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.843980 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac39d25b-ee08-46dc-9ec0-3a6d91737197" (UID: "ac39d25b-ee08-46dc-9ec0-3a6d91737197"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.854331 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-54665f9df8-dr6n7" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.114:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8443: connect: connection refused" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.870312 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data" (OuterVolumeSpecName: "config-data") pod "ac39d25b-ee08-46dc-9ec0-3a6d91737197" (UID: "ac39d25b-ee08-46dc-9ec0-3a6d91737197"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.913687 5002 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.913758 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkkbj\" (UniqueName: \"kubernetes.io/projected/ac39d25b-ee08-46dc-9ec0-3a6d91737197-kube-api-access-gkkbj\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.913774 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:01 crc kubenswrapper[5002]: I1203 18:18:01.913785 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac39d25b-ee08-46dc-9ec0-3a6d91737197-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.172786 5002 generic.go:334] "Generic (PLEG): container finished" podID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" exitCode=0 Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.172837 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-577995c5c6-vwxf6" Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.172864 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-577995c5c6-vwxf6" event={"ID":"ac39d25b-ee08-46dc-9ec0-3a6d91737197","Type":"ContainerDied","Data":"c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf"} Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.172929 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-577995c5c6-vwxf6" event={"ID":"ac39d25b-ee08-46dc-9ec0-3a6d91737197","Type":"ContainerDied","Data":"38c3a5d838355dbc1283c9b4d6b91178e773334544e27c2a82fe155f89bc5af0"} Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.172954 5002 scope.go:117] "RemoveContainer" containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.208845 5002 scope.go:117] "RemoveContainer" containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" Dec 03 18:18:02 crc kubenswrapper[5002]: E1203 18:18:02.211835 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf\": container with ID starting with c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf not found: ID does not exist" containerID="c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf" Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.211877 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf"} err="failed to get container status \"c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf\": rpc error: code = NotFound desc = could not find container \"c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf\": container with ID starting with c3af6ceccd014df88a5d87c13bce3ccb16da7a3641e2eccb47730fa6265c2edf not found: ID does not exist" Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.215003 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-577995c5c6-vwxf6"] Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.224148 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-577995c5c6-vwxf6"] Dec 03 18:18:02 crc kubenswrapper[5002]: I1203 18:18:02.860488 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" path="/var/lib/kubelet/pods/ac39d25b-ee08-46dc-9ec0-3a6d91737197/volumes" Dec 03 18:18:11 crc kubenswrapper[5002]: I1203 18:18:11.853964 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-54665f9df8-dr6n7" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.114:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8443: connect: connection refused" Dec 03 18:18:11 crc kubenswrapper[5002]: I1203 18:18:11.854680 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.000863 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.126718 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-secret-key\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127358 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-config-data\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127444 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-combined-ca-bundle\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127471 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv2gl\" (UniqueName: \"kubernetes.io/projected/6eb582c6-bdc8-4289-8f44-ee5d3699053d-kube-api-access-rv2gl\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127540 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eb582c6-bdc8-4289-8f44-ee5d3699053d-logs\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127606 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-tls-certs\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127631 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-scripts\") pod \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\" (UID: \"6eb582c6-bdc8-4289-8f44-ee5d3699053d\") " Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.127958 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6eb582c6-bdc8-4289-8f44-ee5d3699053d-logs" (OuterVolumeSpecName: "logs") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.128136 5002 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eb582c6-bdc8-4289-8f44-ee5d3699053d-logs\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.135609 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eb582c6-bdc8-4289-8f44-ee5d3699053d-kube-api-access-rv2gl" (OuterVolumeSpecName: "kube-api-access-rv2gl") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). 
InnerVolumeSpecName "kube-api-access-rv2gl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.138326 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.162612 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-config-data" (OuterVolumeSpecName: "config-data") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.169084 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-scripts" (OuterVolumeSpecName: "scripts") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.207225 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.209870 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6eb582c6-bdc8-4289-8f44-ee5d3699053d" (UID: "6eb582c6-bdc8-4289-8f44-ee5d3699053d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.229512 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.229544 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.229554 5002 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.229563 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eb582c6-bdc8-4289-8f44-ee5d3699053d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.229572 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb582c6-bdc8-4289-8f44-ee5d3699053d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.229580 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv2gl\" (UniqueName: \"kubernetes.io/projected/6eb582c6-bdc8-4289-8f44-ee5d3699053d-kube-api-access-rv2gl\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.300945 5002 generic.go:334] "Generic (PLEG): container finished" podID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerID="8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43" exitCode=137 Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.300992 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54665f9df8-dr6n7" event={"ID":"6eb582c6-bdc8-4289-8f44-ee5d3699053d","Type":"ContainerDied","Data":"8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43"} Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.301020 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54665f9df8-dr6n7" event={"ID":"6eb582c6-bdc8-4289-8f44-ee5d3699053d","Type":"ContainerDied","Data":"a98f89ff6e566ae7d24762a4793c021e899b79256575f61feda0c4eb897567f2"} Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.301036 5002 scope.go:117] "RemoveContainer" containerID="bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.301172 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-54665f9df8-dr6n7" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.350939 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-54665f9df8-dr6n7"] Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.371269 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-54665f9df8-dr6n7"] Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.479538 5002 scope.go:117] "RemoveContainer" containerID="8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.500635 5002 scope.go:117] "RemoveContainer" containerID="bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266" Dec 03 18:18:13 crc kubenswrapper[5002]: E1203 18:18:13.501202 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266\": container with ID starting with bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266 not found: ID does not exist" containerID="bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.501257 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266"} err="failed to get container status \"bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266\": rpc error: code = NotFound desc = could not find container \"bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266\": container with ID starting with bc52cf2a1482b6c4e8f67176ab6f0a19d401a344bac3b91ae1cb0cfcc6831266 not found: ID does not exist" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.501289 5002 scope.go:117] "RemoveContainer" containerID="8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43" Dec 03 18:18:13 crc kubenswrapper[5002]: E1203 18:18:13.501596 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43\": container with ID starting with 8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43 not found: ID does not exist" containerID="8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43" Dec 03 18:18:13 crc kubenswrapper[5002]: I1203 18:18:13.501684 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43"} err="failed to get container status \"8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43\": rpc error: code = NotFound desc = could not find container \"8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43\": container with ID starting with 8b0dca802d6f465174440963714d83fc09c7b7ee1289c4fc26cfc8c49fcd8e43 not found: ID does not exist" Dec 03 18:18:14 crc kubenswrapper[5002]: I1203 18:18:14.850581 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" path="/var/lib/kubelet/pods/6eb582c6-bdc8-4289-8f44-ee5d3699053d/volumes" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.693577 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh"] Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 
18:18:17.694231 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" containerName="heat-engine" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694245 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" containerName="heat-engine" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694259 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694265 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694277 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694283 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694294 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694299 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694316 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694322 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694333 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694340 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694355 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694360 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694376 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon-log" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694382 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon-log" Dec 03 18:18:17 crc kubenswrapper[5002]: E1203 18:18:17.694392 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694397 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694558 5002 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c4fa7e7d-0739-47ea-99ba-b157cc3b453f" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694569 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="18ecf5a5-e6cc-41f8-8f8b-49f55b35a1c7" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694582 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694592 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon-log" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694601 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694611 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d4d6c4-a7ae-4c2f-bc5a-a31d45680b9d" containerName="heat-api" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694618 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eb582c6-bdc8-4289-8f44-ee5d3699053d" containerName="horizon" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694628 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="afb3f81d-a644-4291-964f-e467b85c77fa" containerName="heat-cfnapi" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.694644 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac39d25b-ee08-46dc-9ec0-3a6d91737197" containerName="heat-engine" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.696003 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.699734 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.726684 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh"] Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.751874 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.751947 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vncl\" (UniqueName: \"kubernetes.io/projected/eed6bf02-5a3f-401a-9e18-72374acb6931-kube-api-access-2vncl\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.752000 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-bundle\") pod 
\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.854159 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.854489 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vncl\" (UniqueName: \"kubernetes.io/projected/eed6bf02-5a3f-401a-9e18-72374acb6931-kube-api-access-2vncl\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.854526 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.854801 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.854964 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:17 crc kubenswrapper[5002]: I1203 18:18:17.876563 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vncl\" (UniqueName: \"kubernetes.io/projected/eed6bf02-5a3f-401a-9e18-72374acb6931-kube-api-access-2vncl\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:18 crc kubenswrapper[5002]: I1203 18:18:18.045939 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:18 crc kubenswrapper[5002]: I1203 18:18:18.551982 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh"] Dec 03 18:18:19 crc kubenswrapper[5002]: I1203 18:18:19.353196 5002 generic.go:334] "Generic (PLEG): container finished" podID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerID="83dff6ab7fa263141d352df31941b78bd44f626542b531fed570828820f4b1c9" exitCode=0 Dec 03 18:18:19 crc kubenswrapper[5002]: I1203 18:18:19.353295 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" event={"ID":"eed6bf02-5a3f-401a-9e18-72374acb6931","Type":"ContainerDied","Data":"83dff6ab7fa263141d352df31941b78bd44f626542b531fed570828820f4b1c9"} Dec 03 18:18:19 crc kubenswrapper[5002]: I1203 18:18:19.353523 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" event={"ID":"eed6bf02-5a3f-401a-9e18-72374acb6931","Type":"ContainerStarted","Data":"1700c63fbb780ca2f74b83244b0112c3e9a104d4517ad93726562bba4de3bfdf"} Dec 03 18:18:21 crc kubenswrapper[5002]: I1203 18:18:21.371008 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" event={"ID":"eed6bf02-5a3f-401a-9e18-72374acb6931","Type":"ContainerStarted","Data":"6537ed1005d07b6254e023a5ce5a470863c2e925de24804347fc988bd69db3bb"} Dec 03 18:18:22 crc kubenswrapper[5002]: I1203 18:18:22.386335 5002 generic.go:334] "Generic (PLEG): container finished" podID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerID="6537ed1005d07b6254e023a5ce5a470863c2e925de24804347fc988bd69db3bb" exitCode=0 Dec 03 18:18:22 crc kubenswrapper[5002]: I1203 18:18:22.387030 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" event={"ID":"eed6bf02-5a3f-401a-9e18-72374acb6931","Type":"ContainerDied","Data":"6537ed1005d07b6254e023a5ce5a470863c2e925de24804347fc988bd69db3bb"} Dec 03 18:18:23 crc kubenswrapper[5002]: I1203 18:18:23.401323 5002 generic.go:334] "Generic (PLEG): container finished" podID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerID="9453a76c5dff5c1f42a8042cde064ed8884842026ad36970a68e1ab4b0e7eb55" exitCode=0 Dec 03 18:18:23 crc kubenswrapper[5002]: I1203 18:18:23.401396 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" event={"ID":"eed6bf02-5a3f-401a-9e18-72374acb6931","Type":"ContainerDied","Data":"9453a76c5dff5c1f42a8042cde064ed8884842026ad36970a68e1ab4b0e7eb55"} Dec 03 18:18:24 crc kubenswrapper[5002]: I1203 18:18:24.916019 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:24 crc kubenswrapper[5002]: I1203 18:18:24.918589 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-bundle\") pod \"eed6bf02-5a3f-401a-9e18-72374acb6931\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " Dec 03 18:18:24 crc kubenswrapper[5002]: I1203 18:18:24.918784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-util\") pod \"eed6bf02-5a3f-401a-9e18-72374acb6931\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " Dec 03 18:18:24 crc kubenswrapper[5002]: I1203 18:18:24.922129 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-bundle" (OuterVolumeSpecName: "bundle") pod "eed6bf02-5a3f-401a-9e18-72374acb6931" (UID: "eed6bf02-5a3f-401a-9e18-72374acb6931"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:18:24 crc kubenswrapper[5002]: I1203 18:18:24.925516 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-util" (OuterVolumeSpecName: "util") pod "eed6bf02-5a3f-401a-9e18-72374acb6931" (UID: "eed6bf02-5a3f-401a-9e18-72374acb6931"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.021256 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vncl\" (UniqueName: \"kubernetes.io/projected/eed6bf02-5a3f-401a-9e18-72374acb6931-kube-api-access-2vncl\") pod \"eed6bf02-5a3f-401a-9e18-72374acb6931\" (UID: \"eed6bf02-5a3f-401a-9e18-72374acb6931\") " Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.021785 5002 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.021802 5002 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eed6bf02-5a3f-401a-9e18-72374acb6931-util\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.029559 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eed6bf02-5a3f-401a-9e18-72374acb6931-kube-api-access-2vncl" (OuterVolumeSpecName: "kube-api-access-2vncl") pod "eed6bf02-5a3f-401a-9e18-72374acb6931" (UID: "eed6bf02-5a3f-401a-9e18-72374acb6931"). InnerVolumeSpecName "kube-api-access-2vncl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.123515 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vncl\" (UniqueName: \"kubernetes.io/projected/eed6bf02-5a3f-401a-9e18-72374acb6931-kube-api-access-2vncl\") on node \"crc\" DevicePath \"\"" Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.425805 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" event={"ID":"eed6bf02-5a3f-401a-9e18-72374acb6931","Type":"ContainerDied","Data":"1700c63fbb780ca2f74b83244b0112c3e9a104d4517ad93726562bba4de3bfdf"} Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.425992 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1700c63fbb780ca2f74b83244b0112c3e9a104d4517ad93726562bba4de3bfdf" Dec 03 18:18:25 crc kubenswrapper[5002]: I1203 18:18:25.425875 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh" Dec 03 18:18:32 crc kubenswrapper[5002]: I1203 18:18:32.067777 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-a01f-account-create-update-94srm"] Dec 03 18:18:32 crc kubenswrapper[5002]: I1203 18:18:32.078019 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-8tvgf"] Dec 03 18:18:32 crc kubenswrapper[5002]: I1203 18:18:32.086176 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-a01f-account-create-update-94srm"] Dec 03 18:18:32 crc kubenswrapper[5002]: I1203 18:18:32.101630 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-8tvgf"] Dec 03 18:18:32 crc kubenswrapper[5002]: I1203 18:18:32.858662 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ee68fc3-1a09-4d84-a183-d4d78f3f9006" path="/var/lib/kubelet/pods/9ee68fc3-1a09-4d84-a183-d4d78f3f9006/volumes" Dec 03 18:18:32 crc kubenswrapper[5002]: I1203 18:18:32.859706 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef699e44-5aa7-4513-bb2e-e51999ad601f" path="/var/lib/kubelet/pods/ef699e44-5aa7-4513-bb2e-e51999ad601f/volumes" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.261809 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4"] Dec 03 18:18:36 crc kubenswrapper[5002]: E1203 18:18:36.262767 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="extract" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.262782 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="extract" Dec 03 18:18:36 crc kubenswrapper[5002]: E1203 18:18:36.262795 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="util" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.262802 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="util" Dec 03 18:18:36 crc kubenswrapper[5002]: E1203 18:18:36.262817 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="pull" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.262823 5002 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="pull" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.263015 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="eed6bf02-5a3f-401a-9e18-72374acb6931" containerName="extract" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.263719 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.267204 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.267459 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-kp4lj" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.270026 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.278281 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.348963 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.350418 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.352789 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.353167 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-b5xfj" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.363516 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rqwt\" (UniqueName: \"kubernetes.io/projected/427005c4-4ead-4ac2-b151-21f74eac9b18-kube-api-access-8rqwt\") pod \"obo-prometheus-operator-668cf9dfbb-5s8x4\" (UID: \"427005c4-4ead-4ac2-b151-21f74eac9b18\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.367982 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.369365 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.377012 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.406164 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.467566 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b32b9e1f-8c29-47c6-b0c1-e4389e55422a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-qxxsx\" (UID: \"b32b9e1f-8c29-47c6-b0c1-e4389e55422a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.467875 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b32b9e1f-8c29-47c6-b0c1-e4389e55422a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-qxxsx\" (UID: \"b32b9e1f-8c29-47c6-b0c1-e4389e55422a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.468116 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rqwt\" (UniqueName: \"kubernetes.io/projected/427005c4-4ead-4ac2-b151-21f74eac9b18-kube-api-access-8rqwt\") pod \"obo-prometheus-operator-668cf9dfbb-5s8x4\" (UID: \"427005c4-4ead-4ac2-b151-21f74eac9b18\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.468287 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5bb2698d-64e4-4595-aa89-b4fb8751109c-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-lzv9c\" (UID: \"5bb2698d-64e4-4595-aa89-b4fb8751109c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.468413 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5bb2698d-64e4-4595-aa89-b4fb8751109c-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-lzv9c\" (UID: \"5bb2698d-64e4-4595-aa89-b4fb8751109c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.502406 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rqwt\" (UniqueName: \"kubernetes.io/projected/427005c4-4ead-4ac2-b151-21f74eac9b18-kube-api-access-8rqwt\") pod \"obo-prometheus-operator-668cf9dfbb-5s8x4\" (UID: \"427005c4-4ead-4ac2-b151-21f74eac9b18\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.513625 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-mblh2"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.532371 5002 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-mblh2"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.532762 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.539454 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-5tffk" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.539986 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.576444 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l266\" (UniqueName: \"kubernetes.io/projected/40a6500c-ca6c-4b78-a9f5-cab04e7fae29-kube-api-access-2l266\") pod \"observability-operator-d8bb48f5d-mblh2\" (UID: \"40a6500c-ca6c-4b78-a9f5-cab04e7fae29\") " pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.576501 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/40a6500c-ca6c-4b78-a9f5-cab04e7fae29-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mblh2\" (UID: \"40a6500c-ca6c-4b78-a9f5-cab04e7fae29\") " pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.576531 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5bb2698d-64e4-4595-aa89-b4fb8751109c-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-lzv9c\" (UID: \"5bb2698d-64e4-4595-aa89-b4fb8751109c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.576580 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5bb2698d-64e4-4595-aa89-b4fb8751109c-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-lzv9c\" (UID: \"5bb2698d-64e4-4595-aa89-b4fb8751109c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.576619 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b32b9e1f-8c29-47c6-b0c1-e4389e55422a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-qxxsx\" (UID: \"b32b9e1f-8c29-47c6-b0c1-e4389e55422a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.576637 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b32b9e1f-8c29-47c6-b0c1-e4389e55422a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-qxxsx\" (UID: \"b32b9e1f-8c29-47c6-b0c1-e4389e55422a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.580002 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b32b9e1f-8c29-47c6-b0c1-e4389e55422a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-qxxsx\" (UID: \"b32b9e1f-8c29-47c6-b0c1-e4389e55422a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.583167 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5bb2698d-64e4-4595-aa89-b4fb8751109c-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-lzv9c\" (UID: \"5bb2698d-64e4-4595-aa89-b4fb8751109c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.584486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b32b9e1f-8c29-47c6-b0c1-e4389e55422a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-qxxsx\" (UID: \"b32b9e1f-8c29-47c6-b0c1-e4389e55422a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.587682 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.589690 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5bb2698d-64e4-4595-aa89-b4fb8751109c-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848b69754c-lzv9c\" (UID: \"5bb2698d-64e4-4595-aa89-b4fb8751109c\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.673382 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.679425 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l266\" (UniqueName: \"kubernetes.io/projected/40a6500c-ca6c-4b78-a9f5-cab04e7fae29-kube-api-access-2l266\") pod \"observability-operator-d8bb48f5d-mblh2\" (UID: \"40a6500c-ca6c-4b78-a9f5-cab04e7fae29\") " pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.679511 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/40a6500c-ca6c-4b78-a9f5-cab04e7fae29-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mblh2\" (UID: \"40a6500c-ca6c-4b78-a9f5-cab04e7fae29\") " pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.709562 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.866468 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/40a6500c-ca6c-4b78-a9f5-cab04e7fae29-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mblh2\" (UID: \"40a6500c-ca6c-4b78-a9f5-cab04e7fae29\") " pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.887784 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l266\" (UniqueName: \"kubernetes.io/projected/40a6500c-ca6c-4b78-a9f5-cab04e7fae29-kube-api-access-2l266\") pod \"observability-operator-d8bb48f5d-mblh2\" (UID: \"40a6500c-ca6c-4b78-a9f5-cab04e7fae29\") " pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.969731 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-b6zsg"] Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.971668 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.981465 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-7z67f" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.984636 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjfkn\" (UniqueName: \"kubernetes.io/projected/1776756e-524b-4933-98d6-375c71008ac4-kube-api-access-tjfkn\") pod \"perses-operator-5446b9c989-b6zsg\" (UID: \"1776756e-524b-4933-98d6-375c71008ac4\") " pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:36 crc kubenswrapper[5002]: I1203 18:18:36.984866 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1776756e-524b-4933-98d6-375c71008ac4-openshift-service-ca\") pod \"perses-operator-5446b9c989-b6zsg\" (UID: \"1776756e-524b-4933-98d6-375c71008ac4\") " pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.037362 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-b6zsg"] Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.098724 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1776756e-524b-4933-98d6-375c71008ac4-openshift-service-ca\") pod \"perses-operator-5446b9c989-b6zsg\" (UID: \"1776756e-524b-4933-98d6-375c71008ac4\") " pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.098999 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjfkn\" (UniqueName: \"kubernetes.io/projected/1776756e-524b-4933-98d6-375c71008ac4-kube-api-access-tjfkn\") pod \"perses-operator-5446b9c989-b6zsg\" (UID: \"1776756e-524b-4933-98d6-375c71008ac4\") " pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.100777 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1776756e-524b-4933-98d6-375c71008ac4-openshift-service-ca\") pod \"perses-operator-5446b9c989-b6zsg\" (UID: \"1776756e-524b-4933-98d6-375c71008ac4\") " pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.128059 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjfkn\" (UniqueName: \"kubernetes.io/projected/1776756e-524b-4933-98d6-375c71008ac4-kube-api-access-tjfkn\") pod \"perses-operator-5446b9c989-b6zsg\" (UID: \"1776756e-524b-4933-98d6-375c71008ac4\") " pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.182670 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.382474 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.608669 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4"] Dec 03 18:18:37 crc kubenswrapper[5002]: W1203 18:18:37.695899 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod427005c4_4ead_4ac2_b151_21f74eac9b18.slice/crio-6eb775b8d0bffcf8d0d554386ddcb1b627b00fb3761ef7a9b9c568b65072e8f1 WatchSource:0}: Error finding container 6eb775b8d0bffcf8d0d554386ddcb1b627b00fb3761ef7a9b9c568b65072e8f1: Status 404 returned error can't find the container with id 6eb775b8d0bffcf8d0d554386ddcb1b627b00fb3761ef7a9b9c568b65072e8f1 Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.704755 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx"] Dec 03 18:18:37 crc kubenswrapper[5002]: I1203 18:18:37.887063 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c"] Dec 03 18:18:37 crc kubenswrapper[5002]: W1203 18:18:37.917206 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bb2698d_64e4_4595_aa89_b4fb8751109c.slice/crio-06af6bd39176730bd78aa3c66c4b7509563ecdf316c1e806b5501f3167baa46b WatchSource:0}: Error finding container 06af6bd39176730bd78aa3c66c4b7509563ecdf316c1e806b5501f3167baa46b: Status 404 returned error can't find the container with id 06af6bd39176730bd78aa3c66c4b7509563ecdf316c1e806b5501f3167baa46b Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.061854 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-mblh2"] Dec 03 18:18:38 crc kubenswrapper[5002]: W1203 18:18:38.160724 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1776756e_524b_4933_98d6_375c71008ac4.slice/crio-83debf54dac614a313fe2c45cc078b86cf47884837d02a217d7c1ba0d2703a44 WatchSource:0}: Error finding container 83debf54dac614a313fe2c45cc078b86cf47884837d02a217d7c1ba0d2703a44: Status 404 returned error can't find the container with id 83debf54dac614a313fe2c45cc078b86cf47884837d02a217d7c1ba0d2703a44 Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.161059 
5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-b6zsg"] Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.592521 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" event={"ID":"b32b9e1f-8c29-47c6-b0c1-e4389e55422a","Type":"ContainerStarted","Data":"6debad633e77d98fb6b75c4e96c78246b9656e2e2f4c5c03f10b978c96b997bc"} Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.593948 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" event={"ID":"40a6500c-ca6c-4b78-a9f5-cab04e7fae29","Type":"ContainerStarted","Data":"1cb7ef9810f4e95cc0e028331defa2fd8eb9c08c6d8d2d21193ab838b33aa81a"} Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.594912 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" event={"ID":"5bb2698d-64e4-4595-aa89-b4fb8751109c","Type":"ContainerStarted","Data":"06af6bd39176730bd78aa3c66c4b7509563ecdf316c1e806b5501f3167baa46b"} Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.597483 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" event={"ID":"1776756e-524b-4933-98d6-375c71008ac4","Type":"ContainerStarted","Data":"83debf54dac614a313fe2c45cc078b86cf47884837d02a217d7c1ba0d2703a44"} Dec 03 18:18:38 crc kubenswrapper[5002]: I1203 18:18:38.602250 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" event={"ID":"427005c4-4ead-4ac2-b151-21f74eac9b18","Type":"ContainerStarted","Data":"6eb775b8d0bffcf8d0d554386ddcb1b627b00fb3761ef7a9b9c568b65072e8f1"} Dec 03 18:18:44 crc kubenswrapper[5002]: I1203 18:18:44.042193 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-9wvmr"] Dec 03 18:18:44 crc kubenswrapper[5002]: I1203 18:18:44.053641 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-9wvmr"] Dec 03 18:18:44 crc kubenswrapper[5002]: I1203 18:18:44.794812 5002 scope.go:117] "RemoveContainer" containerID="a56ff886d8d328a3cec1e3e7469a99b8ec11cb82fa52e58da8d81e88ca231e97" Dec 03 18:18:44 crc kubenswrapper[5002]: I1203 18:18:44.853147 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b290899-0d62-4fab-9d24-1b3f36e7d2a4" path="/var/lib/kubelet/pods/6b290899-0d62-4fab-9d24-1b3f36e7d2a4/volumes" Dec 03 18:18:46 crc kubenswrapper[5002]: I1203 18:18:46.891433 5002 scope.go:117] "RemoveContainer" containerID="ac826346c2ba7d9ab474e64fabfe0891a737be696046cda133aeb7513565f1a5" Dec 03 18:18:46 crc kubenswrapper[5002]: I1203 18:18:46.975540 5002 scope.go:117] "RemoveContainer" containerID="f1b46b627f9ef8a4cf57a6af9970080b8844b03bde954836ffa4881cb22e98c2" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.718153 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" event={"ID":"5bb2698d-64e4-4595-aa89-b4fb8751109c","Type":"ContainerStarted","Data":"1d98043bae4edf5395ffe341ed77a5d27bebff363b90d8584619e9d30a48399e"} Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.719535 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" 
event={"ID":"1776756e-524b-4933-98d6-375c71008ac4","Type":"ContainerStarted","Data":"8a54811ef383dd22999c5c31aedb66c0d64663811c57135bd9ca4494e6606002"} Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.719680 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.723145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" event={"ID":"b32b9e1f-8c29-47c6-b0c1-e4389e55422a","Type":"ContainerStarted","Data":"81fcff0194c587f444e88ad4f5a0fc0b0cdb50bd7b79c6c47036f0bf2e81f421"} Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.725939 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" event={"ID":"40a6500c-ca6c-4b78-a9f5-cab04e7fae29","Type":"ContainerStarted","Data":"19e3e949d3297885adea711dfd4f8c5856538b247245c1b48b44afd4723e19db"} Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.726090 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.727419 5002 patch_prober.go:28] interesting pod/observability-operator-d8bb48f5d-mblh2 container/operator namespace/openshift-operators: Readiness probe status=failure output="Get \"http://10.217.1.134:8081/healthz\": dial tcp 10.217.1.134:8081: connect: connection refused" start-of-body= Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.727458 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" podUID="40a6500c-ca6c-4b78-a9f5-cab04e7fae29" containerName="operator" probeResult="failure" output="Get \"http://10.217.1.134:8081/healthz\": dial tcp 10.217.1.134:8081: connect: connection refused" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.729011 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" event={"ID":"427005c4-4ead-4ac2-b151-21f74eac9b18","Type":"ContainerStarted","Data":"14443fdb79cdaf2efc74f610f8328b8e5544d721d2d0160825bc6ad9c727f1b5"} Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.742732 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-lzv9c" podStartSLOduration=2.775317296 podStartE2EDuration="11.742709671s" podCreationTimestamp="2025-12-03 18:18:36 +0000 UTC" firstStartedPulling="2025-12-03 18:18:37.925567674 +0000 UTC m=+6441.339389562" lastFinishedPulling="2025-12-03 18:18:46.892960029 +0000 UTC m=+6450.306781937" observedRunningTime="2025-12-03 18:18:47.73412868 +0000 UTC m=+6451.147950578" watchObservedRunningTime="2025-12-03 18:18:47.742709671 +0000 UTC m=+6451.156531559" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.788261 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848b69754c-qxxsx" podStartSLOduration=2.600311202 podStartE2EDuration="11.788241588s" podCreationTimestamp="2025-12-03 18:18:36 +0000 UTC" firstStartedPulling="2025-12-03 18:18:37.715255349 +0000 UTC m=+6441.129077227" lastFinishedPulling="2025-12-03 18:18:46.903185705 +0000 UTC m=+6450.317007613" observedRunningTime="2025-12-03 18:18:47.76901345 +0000 UTC m=+6451.182835328" 
watchObservedRunningTime="2025-12-03 18:18:47.788241588 +0000 UTC m=+6451.202063476" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.803200 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" podStartSLOduration=3.062031219 podStartE2EDuration="11.80317854s" podCreationTimestamp="2025-12-03 18:18:36 +0000 UTC" firstStartedPulling="2025-12-03 18:18:38.16302307 +0000 UTC m=+6441.576844958" lastFinishedPulling="2025-12-03 18:18:46.904170371 +0000 UTC m=+6450.317992279" observedRunningTime="2025-12-03 18:18:47.797467286 +0000 UTC m=+6451.211289174" watchObservedRunningTime="2025-12-03 18:18:47.80317854 +0000 UTC m=+6451.217000428" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.855819 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5s8x4" podStartSLOduration=2.652380483 podStartE2EDuration="11.855800988s" podCreationTimestamp="2025-12-03 18:18:36 +0000 UTC" firstStartedPulling="2025-12-03 18:18:37.701160758 +0000 UTC m=+6441.114982646" lastFinishedPulling="2025-12-03 18:18:46.904581263 +0000 UTC m=+6450.318403151" observedRunningTime="2025-12-03 18:18:47.832289794 +0000 UTC m=+6451.246111692" watchObservedRunningTime="2025-12-03 18:18:47.855800988 +0000 UTC m=+6451.269622876" Dec 03 18:18:47 crc kubenswrapper[5002]: I1203 18:18:47.876940 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" podStartSLOduration=3.061426673 podStartE2EDuration="11.876917997s" podCreationTimestamp="2025-12-03 18:18:36 +0000 UTC" firstStartedPulling="2025-12-03 18:18:38.089928191 +0000 UTC m=+6441.503750079" lastFinishedPulling="2025-12-03 18:18:46.905419495 +0000 UTC m=+6450.319241403" observedRunningTime="2025-12-03 18:18:47.865380296 +0000 UTC m=+6451.279202194" watchObservedRunningTime="2025-12-03 18:18:47.876917997 +0000 UTC m=+6451.290739885" Dec 03 18:18:48 crc kubenswrapper[5002]: I1203 18:18:48.739096 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-mblh2" Dec 03 18:18:50 crc kubenswrapper[5002]: I1203 18:18:50.917611 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:18:50 crc kubenswrapper[5002]: I1203 18:18:50.917677 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:18:57 crc kubenswrapper[5002]: I1203 18:18:57.388373 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-b6zsg" Dec 03 18:18:59 crc kubenswrapper[5002]: I1203 18:18:59.990007 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 03 18:18:59 crc kubenswrapper[5002]: I1203 18:18:59.990727 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" 
containerName="openstackclient" containerID="cri-o://27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577" gracePeriod=2 Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.003616 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.077355 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:00 crc kubenswrapper[5002]: E1203 18:19:00.077836 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" containerName="openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.077853 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" containerName="openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.078071 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" containerName="openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.078786 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.134677 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.154967 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.155030 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config-secret\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.155067 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.155190 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdm7b\" (UniqueName: \"kubernetes.io/projected/ca22aa38-32ed-4e21-aa8b-e0185634506d-kube-api-access-cdm7b\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.155705 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.264003 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " 
pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.264053 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config-secret\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.264077 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.264141 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdm7b\" (UniqueName: \"kubernetes.io/projected/ca22aa38-32ed-4e21-aa8b-e0185634506d-kube-api-access-cdm7b\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.264939 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.277526 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.288775 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config-secret\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.293551 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdm7b\" (UniqueName: \"kubernetes.io/projected/ca22aa38-32ed-4e21-aa8b-e0185634506d-kube-api-access-cdm7b\") pod \"openstackclient\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.330005 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.331327 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.336327 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jmwjd" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.345366 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.368706 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m92p2\" (UniqueName: \"kubernetes.io/projected/9f647f42-8d07-4dbf-8358-d8e20957b795-kube-api-access-m92p2\") pod \"kube-state-metrics-0\" (UID: \"9f647f42-8d07-4dbf-8358-d8e20957b795\") " pod="openstack/kube-state-metrics-0" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.421353 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.470246 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m92p2\" (UniqueName: \"kubernetes.io/projected/9f647f42-8d07-4dbf-8358-d8e20957b795-kube-api-access-m92p2\") pod \"kube-state-metrics-0\" (UID: \"9f647f42-8d07-4dbf-8358-d8e20957b795\") " pod="openstack/kube-state-metrics-0" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.514275 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m92p2\" (UniqueName: \"kubernetes.io/projected/9f647f42-8d07-4dbf-8358-d8e20957b795-kube-api-access-m92p2\") pod \"kube-state-metrics-0\" (UID: \"9f647f42-8d07-4dbf-8358-d8e20957b795\") " pod="openstack/kube-state-metrics-0" Dec 03 18:19:00 crc kubenswrapper[5002]: I1203 18:19:00.716617 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.069036 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.089400 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.093364 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.096164 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-rn469" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.096356 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.096466 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.096566 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.096682 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117118 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/17e91ab9-8baa-4b7f-b87e-99614ee85a63-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117317 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/17e91ab9-8baa-4b7f-b87e-99614ee85a63-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117389 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/17e91ab9-8baa-4b7f-b87e-99614ee85a63-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117434 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117478 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94slf\" (UniqueName: \"kubernetes.io/projected/17e91ab9-8baa-4b7f-b87e-99614ee85a63-kube-api-access-94slf\") pod \"alertmanager-metric-storage-0\" 
(UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.117535 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220432 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220488 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/17e91ab9-8baa-4b7f-b87e-99614ee85a63-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220529 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/17e91ab9-8baa-4b7f-b87e-99614ee85a63-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220554 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220580 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94slf\" (UniqueName: \"kubernetes.io/projected/17e91ab9-8baa-4b7f-b87e-99614ee85a63-kube-api-access-94slf\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220611 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.220680 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/17e91ab9-8baa-4b7f-b87e-99614ee85a63-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.229520 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/17e91ab9-8baa-4b7f-b87e-99614ee85a63-alertmanager-metric-storage-db\") pod 
\"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.237635 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/17e91ab9-8baa-4b7f-b87e-99614ee85a63-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.261000 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.270542 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.282345 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/17e91ab9-8baa-4b7f-b87e-99614ee85a63-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.283172 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/17e91ab9-8baa-4b7f-b87e-99614ee85a63-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.298970 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94slf\" (UniqueName: \"kubernetes.io/projected/17e91ab9-8baa-4b7f-b87e-99614ee85a63-kube-api-access-94slf\") pod \"alertmanager-metric-storage-0\" (UID: \"17e91ab9-8baa-4b7f-b87e-99614ee85a63\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.436719 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.556328 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.688633 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.691699 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.712519 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.713411 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-9l22p" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.713534 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.713631 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.713787 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.714315 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.750787 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848443 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsqbg\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-kube-api-access-bsqbg\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848500 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/446440c7-e1a5-453c-b350-afee8d3a1f80-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848569 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848591 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848620 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-config\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848640 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848670 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.848707 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/446440c7-e1a5-453c-b350-afee8d3a1f80-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.851886 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.951818 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/446440c7-e1a5-453c-b350-afee8d3a1f80-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952286 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsqbg\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-kube-api-access-bsqbg\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952314 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/446440c7-e1a5-453c-b350-afee8d3a1f80-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952438 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952466 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952512 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-config\") pod \"prometheus-metric-storage-0\" (UID: 
\"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952539 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.952575 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.960009 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/446440c7-e1a5-453c-b350-afee8d3a1f80-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.962049 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9f647f42-8d07-4dbf-8358-d8e20957b795","Type":"ContainerStarted","Data":"44e6aade50d461b9ec247f75234eb08f362074d8fae2671140ba140eeb3120e9"} Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.972329 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.972366 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.974557 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.975344 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ca22aa38-32ed-4e21-aa8b-e0185634506d","Type":"ContainerStarted","Data":"6a589f032c62c0cc344dd22930ae4572bd450e73b7ce0aa0c52deeb4ced1f45a"} Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 18:19:01.977987 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/446440c7-e1a5-453c-b350-afee8d3a1f80-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:01 crc kubenswrapper[5002]: I1203 
18:19:01.989619 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-config\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.007592 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsqbg\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-kube-api-access-bsqbg\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.025958 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.026003 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/94371cf951ef3da22c6d29514db5618c55bcf638f61084fc1b944304ccc42026/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.461265 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.538028 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.641604 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.966123 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.991149 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ca22aa38-32ed-4e21-aa8b-e0185634506d","Type":"ContainerStarted","Data":"d70907bc3bde979acd4451a27bc2559c5044f813c594ca4d4641eb6375b7ef80"} Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.994687 5002 generic.go:334] "Generic (PLEG): container finished" podID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" containerID="27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577" exitCode=137 Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.994862 5002 scope.go:117] "RemoveContainer" containerID="27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.995081 5002 util.go:48] "No ready sandbox for pod can be found. 
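The csi_attacher line above records the kubelet asking the kubevirt.io.hostpath-provisioner node service for its capabilities, finding STAGE_UNSTAGE_VOLUME absent, and therefore skipping the NodeStageVolume/MountDevice step and publishing the PVC directly. A sketch of that capability check against the CSI spec's Go bindings; this mirrors the decision, not the kubelet's exact code, and the gRPC client wiring is left out:

package main

import (
	"context"

	csi "github.com/container-storage-interface/spec/lib/go/csi"
)

// nodeSupportsStaging reports whether a CSI node service requires the
// NodeStageVolume/NodeUnstageVolume steps. When it returns false, the
// caller can skip the device-staging phase, as in the log line above.
func nodeSupportsStaging(ctx context.Context, node csi.NodeClient) (bool, error) {
	resp, err := node.NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
	if err != nil {
		return false, err
	}
	for _, c := range resp.GetCapabilities() {
		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
			return true, nil
		}
	}
	return false, nil
}

// Wiring a real csi.NodeClient over the plugin's unix socket is omitted here.
func main() { _ = nodeSupportsStaging }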
Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:02 crc kubenswrapper[5002]: I1203 18:19:02.999659 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"17e91ab9-8baa-4b7f-b87e-99614ee85a63","Type":"ContainerStarted","Data":"6ccd9f53aef1723178c85321ef4ac8261db0c0840f1d86c1464dd51e99a0066e"} Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.022074 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-combined-ca-bundle\") pod \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.022541 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config\") pod \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.022508 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.022464718 podStartE2EDuration="3.022464718s" podCreationTimestamp="2025-12-03 18:19:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:19:03.013975749 +0000 UTC m=+6466.427797657" watchObservedRunningTime="2025-12-03 18:19:03.022464718 +0000 UTC m=+6466.436286606" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.022684 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config-secret\") pod \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.022722 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svjlt\" (UniqueName: \"kubernetes.io/projected/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-kube-api-access-svjlt\") pod \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\" (UID: \"9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686\") " Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.033794 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-kube-api-access-svjlt" (OuterVolumeSpecName: "kube-api-access-svjlt") pod "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" (UID: "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686"). InnerVolumeSpecName "kube-api-access-svjlt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.052723 5002 scope.go:117] "RemoveContainer" containerID="27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577" Dec 03 18:19:03 crc kubenswrapper[5002]: E1203 18:19:03.053373 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577\": container with ID starting with 27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577 not found: ID does not exist" containerID="27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.053461 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577"} err="failed to get container status \"27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577\": rpc error: code = NotFound desc = could not find container \"27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577\": container with ID starting with 27a070da1cd52dbcdc6abcec2f578745bb745df31095d097e4311d7fdd005577 not found: ID does not exist" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.084773 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" (UID: "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.097984 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" (UID: "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.125902 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.125941 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.125955 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svjlt\" (UniqueName: \"kubernetes.io/projected/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-kube-api-access-svjlt\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.152330 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" (UID: "9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.228251 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.243618 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 18:19:03 crc kubenswrapper[5002]: W1203 18:19:03.250172 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod446440c7_e1a5_453c_b350_afee8d3a1f80.slice/crio-fbc8c813926e57e5e6388302e6355490d29f0cb042ad31bed4f995752b095029 WatchSource:0}: Error finding container fbc8c813926e57e5e6388302e6355490d29f0cb042ad31bed4f995752b095029: Status 404 returned error can't find the container with id fbc8c813926e57e5e6388302e6355490d29f0cb042ad31bed4f995752b095029 Dec 03 18:19:03 crc kubenswrapper[5002]: I1203 18:19:03.371127 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" Dec 03 18:19:04 crc kubenswrapper[5002]: I1203 18:19:04.009500 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerStarted","Data":"fbc8c813926e57e5e6388302e6355490d29f0cb042ad31bed4f995752b095029"} Dec 03 18:19:04 crc kubenswrapper[5002]: I1203 18:19:04.012525 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9f647f42-8d07-4dbf-8358-d8e20957b795","Type":"ContainerStarted","Data":"7892f8ab4863464d0a7f8520c818cbd5d685bc1b39ef77da923960d5701c3315"} Dec 03 18:19:04 crc kubenswrapper[5002]: I1203 18:19:04.032048 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.356661771 podStartE2EDuration="4.032030005s" podCreationTimestamp="2025-12-03 18:19:00 +0000 UTC" firstStartedPulling="2025-12-03 18:19:01.874964775 +0000 UTC m=+6465.288786663" lastFinishedPulling="2025-12-03 18:19:02.550333009 +0000 UTC m=+6465.964154897" observedRunningTime="2025-12-03 18:19:04.025648203 +0000 UTC m=+6467.439470091" watchObservedRunningTime="2025-12-03 18:19:04.032030005 +0000 UTC m=+6467.445851893" Dec 03 18:19:04 crc kubenswrapper[5002]: I1203 18:19:04.858426 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686" path="/var/lib/kubelet/pods/9ca9f3d6-dbd1-4cbe-bc69-79f71ff48686/volumes" Dec 03 18:19:05 crc kubenswrapper[5002]: I1203 18:19:05.024772 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 18:19:10 crc kubenswrapper[5002]: I1203 18:19:10.083157 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"17e91ab9-8baa-4b7f-b87e-99614ee85a63","Type":"ContainerStarted","Data":"29c578bd4a2abaa32a6f9bd0f1f7c89df3779f928de6ca50dc71f1862954a839"} Dec 03 18:19:10 crc kubenswrapper[5002]: I1203 18:19:10.088614 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerStarted","Data":"1f7dc2610301b6e792e5fddb16de125f76b248db56cf4dd6b96a583acd72282b"} Dec 03 18:19:10 crc kubenswrapper[5002]: I1203 18:19:10.722634 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 18:19:16 crc kubenswrapper[5002]: I1203 18:19:16.044737 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-51bd-account-create-update-zdmpv"] Dec 03 18:19:16 crc kubenswrapper[5002]: I1203 18:19:16.055931 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tltrr"] Dec 03 18:19:16 crc kubenswrapper[5002]: I1203 18:19:16.063958 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-51bd-account-create-update-zdmpv"] Dec 03 18:19:16 crc kubenswrapper[5002]: I1203 18:19:16.072716 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tltrr"] Dec 03 18:19:16 crc kubenswrapper[5002]: I1203 18:19:16.861598 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3031e012-9670-43c9-8af7-7934214166fa" path="/var/lib/kubelet/pods/3031e012-9670-43c9-8af7-7934214166fa/volumes" Dec 03 18:19:16 crc kubenswrapper[5002]: I1203 18:19:16.863039 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="592398aa-42a0-4f3c-90af-b1d797d70463" path="/var/lib/kubelet/pods/592398aa-42a0-4f3c-90af-b1d797d70463/volumes" Dec 03 18:19:17 crc kubenswrapper[5002]: I1203 18:19:17.161464 5002 generic.go:334] "Generic (PLEG): container finished" podID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerID="1f7dc2610301b6e792e5fddb16de125f76b248db56cf4dd6b96a583acd72282b" exitCode=0 Dec 03 18:19:17 crc kubenswrapper[5002]: I1203 18:19:17.161557 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerDied","Data":"1f7dc2610301b6e792e5fddb16de125f76b248db56cf4dd6b96a583acd72282b"} Dec 03 18:19:17 crc kubenswrapper[5002]: I1203 18:19:17.163501 5002 generic.go:334] "Generic (PLEG): container finished" podID="17e91ab9-8baa-4b7f-b87e-99614ee85a63" containerID="29c578bd4a2abaa32a6f9bd0f1f7c89df3779f928de6ca50dc71f1862954a839" exitCode=0 Dec 03 18:19:17 crc kubenswrapper[5002]: I1203 18:19:17.163538 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"17e91ab9-8baa-4b7f-b87e-99614ee85a63","Type":"ContainerDied","Data":"29c578bd4a2abaa32a6f9bd0f1f7c89df3779f928de6ca50dc71f1862954a839"} Dec 03 18:19:20 crc kubenswrapper[5002]: I1203 18:19:20.198462 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"17e91ab9-8baa-4b7f-b87e-99614ee85a63","Type":"ContainerStarted","Data":"e6be00f84ded1ecb2e7eb7dfae3e5a29eae61cec5c6ac445eb2a8e6c33b5ddb0"} Dec 03 18:19:20 crc kubenswrapper[5002]: I1203 18:19:20.918941 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:19:20 crc kubenswrapper[5002]: I1203 18:19:20.919047 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:19:23 crc kubenswrapper[5002]: I1203 18:19:23.036983 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-4msfn"] Dec 03 18:19:23 crc kubenswrapper[5002]: I1203 18:19:23.049541 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-4msfn"] Dec 03 18:19:23 crc kubenswrapper[5002]: I1203 18:19:23.231458 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"17e91ab9-8baa-4b7f-b87e-99614ee85a63","Type":"ContainerStarted","Data":"376ece3e719de0bdd6e6462e0425f86d9eb9190d6a9e0992217d9631b170aec0"} Dec 03 18:19:23 crc kubenswrapper[5002]: I1203 18:19:23.231778 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:23 crc kubenswrapper[5002]: I1203 18:19:23.237155 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 03 18:19:23 crc kubenswrapper[5002]: I1203 18:19:23.297727 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=5.325298744 podStartE2EDuration="22.29770945s" podCreationTimestamp="2025-12-03 18:19:01 +0000 UTC" firstStartedPulling="2025-12-03 18:19:02.488703869 +0000 UTC m=+6465.902525757" lastFinishedPulling="2025-12-03 18:19:19.461114575 +0000 UTC m=+6482.874936463" observedRunningTime="2025-12-03 18:19:23.265613215 +0000 UTC m=+6486.679435113" watchObservedRunningTime="2025-12-03 18:19:23.29770945 +0000 UTC m=+6486.711531338" Dec 03 18:19:24 crc kubenswrapper[5002]: I1203 18:19:24.863359 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ce05c5-62fa-4073-b5c8-b196479099f3" path="/var/lib/kubelet/pods/f8ce05c5-62fa-4073-b5c8-b196479099f3/volumes" Dec 03 18:19:28 crc kubenswrapper[5002]: I1203 18:19:28.328216 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerStarted","Data":"2266fb3b93c4dabc7695b805f1d4f9d9e815dcbf0fc8c0c31007216c777cdabf"} Dec 03 18:19:33 crc kubenswrapper[5002]: I1203 18:19:33.393259 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerStarted","Data":"8f04c990454a7143628a57e3e72d2e85fae5b4d1ab4884e346fe28e01618d31e"} Dec 03 18:19:36 crc kubenswrapper[5002]: I1203 18:19:36.424680 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerStarted","Data":"21b99bcd07b23d0ec26b14a0fe9bfa20aa11d3a53142ec762efa533c392f7503"} Dec 03 18:19:36 crc kubenswrapper[5002]: I1203 18:19:36.456993 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.609297777 podStartE2EDuration="36.456971222s" podCreationTimestamp="2025-12-03 18:19:00 +0000 UTC" firstStartedPulling="2025-12-03 18:19:03.253108381 +0000 UTC m=+6466.666930269" lastFinishedPulling="2025-12-03 18:19:36.100781826 +0000 UTC m=+6499.514603714" observedRunningTime="2025-12-03 18:19:36.456492179 +0000 UTC m=+6499.870314077" watchObservedRunningTime="2025-12-03 
Dec 03 18:19:37 crc kubenswrapper[5002]: I1203 18:19:37.643138 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.197409 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.202180 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.209767 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.210224 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.211734 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327455 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4wxt\" (UniqueName: \"kubernetes.io/projected/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-kube-api-access-h4wxt\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327535 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-log-httpd\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327599 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-config-data\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327630 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-scripts\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327674 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-run-httpd\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327772 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.327829 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.429701 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-config-data\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.429786 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-scripts\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.429821 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-run-httpd\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.429873 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.429906 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.429987 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4wxt\" (UniqueName: \"kubernetes.io/projected/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-kube-api-access-h4wxt\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.430014 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-log-httpd\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.430502 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-log-httpd\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.430860 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-run-httpd\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.436853 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-scripts\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.436869 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.444829 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.447128 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-config-data\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.452869 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4wxt\" (UniqueName: \"kubernetes.io/projected/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-kube-api-access-h4wxt\") pod \"ceilometer-0\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") " pod="openstack/ceilometer-0"
Dec 03 18:19:42 crc kubenswrapper[5002]: I1203 18:19:42.526827 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 18:19:43 crc kubenswrapper[5002]: I1203 18:19:43.020405 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 18:19:43 crc kubenswrapper[5002]: W1203 18:19:43.030787 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf88da24_95ba_4fba_88ed_2f8f85fa5e3f.slice/crio-a3a1adff309cd776a3fdcab84322b28deaba83836a34a6b2e3dafae00cc9941e WatchSource:0}: Error finding container a3a1adff309cd776a3fdcab84322b28deaba83836a34a6b2e3dafae00cc9941e: Status 404 returned error can't find the container with id a3a1adff309cd776a3fdcab84322b28deaba83836a34a6b2e3dafae00cc9941e
Dec 03 18:19:43 crc kubenswrapper[5002]: I1203 18:19:43.492819 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerStarted","Data":"a3a1adff309cd776a3fdcab84322b28deaba83836a34a6b2e3dafae00cc9941e"}
Dec 03 18:19:44 crc kubenswrapper[5002]: I1203 18:19:44.503588 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerStarted","Data":"a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43"}
Dec 03 18:19:44 crc kubenswrapper[5002]: I1203 18:19:44.503631 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerStarted","Data":"f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40"}
Dec 03 18:19:45 crc kubenswrapper[5002]: I1203 18:19:45.517540 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerStarted","Data":"634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7"}
Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.316784
5002 scope.go:117] "RemoveContainer" containerID="5eed70dbc21661b42ffba0dded5332b64d8d67e1bbb1ace10124a7d368c0a16d" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.349105 5002 scope.go:117] "RemoveContainer" containerID="eb1bdfb237ef6a1f4425a65f2fd6e1176696c5b59741fefdeb54b6ba1ad90c9f" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.407639 5002 scope.go:117] "RemoveContainer" containerID="4d74ffe2f20bfcef2741b38b7c4b2507a6b3d1671f08811e15470837334cddb9" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.458135 5002 scope.go:117] "RemoveContainer" containerID="87dccdb4ff5d9a43a7000fd9d13479fd43c9223a381228068491e97cbaf57f4f" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.553497 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerStarted","Data":"3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893"} Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.555321 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.580580 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.228870411 podStartE2EDuration="5.580561994s" podCreationTimestamp="2025-12-03 18:19:42 +0000 UTC" firstStartedPulling="2025-12-03 18:19:43.03389047 +0000 UTC m=+6506.447712358" lastFinishedPulling="2025-12-03 18:19:46.385582063 +0000 UTC m=+6509.799403941" observedRunningTime="2025-12-03 18:19:47.577370928 +0000 UTC m=+6510.991192816" watchObservedRunningTime="2025-12-03 18:19:47.580561994 +0000 UTC m=+6510.994383882" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.642616 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:47 crc kubenswrapper[5002]: I1203 18:19:47.645238 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:48 crc kubenswrapper[5002]: I1203 18:19:48.569490 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.131063 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.131893 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" containerName="openstackclient" containerID="cri-o://d70907bc3bde979acd4451a27bc2559c5044f813c594ca4d4641eb6375b7ef80" gracePeriod=2 Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.146048 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.170552 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:50 crc kubenswrapper[5002]: E1203 18:19:50.171121 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" containerName="openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.171145 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" containerName="openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.171434 5002 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" containerName="openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.174143 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.187051 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.201652 5002 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" podUID="b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.206320 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-openstack-config-secret\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.206476 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-openstack-config\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.206905 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2bkm\" (UniqueName: \"kubernetes.io/projected/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-kube-api-access-v2bkm\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.207142 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.309174 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2bkm\" (UniqueName: \"kubernetes.io/projected/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-kube-api-access-v2bkm\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.309294 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.309350 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-openstack-config-secret\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.309413 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-openstack-config\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.310285 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-openstack-config\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.318825 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.320879 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-openstack-config-secret\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.328180 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2bkm\" (UniqueName: \"kubernetes.io/projected/b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50-kube-api-access-v2bkm\") pod \"openstackclient\" (UID: \"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50\") " pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.501043 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.926004 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.926370 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.926415 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.927263 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 18:19:50 crc kubenswrapper[5002]: I1203 18:19:50.927308 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b" gracePeriod=600 Dec 03 18:19:51 crc kubenswrapper[5002]: E1203 18:19:51.176619 5002 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1d64ada_fbf9_4b0e_abb6_9b29bfec7309.slice/crio-7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b.scope\": RecentStats: unable to find data in memory cache]" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.274097 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 18:19:51 crc kubenswrapper[5002]: W1203 18:19:51.279695 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb67cb4b3_26bf_46b9_9f1e_e0ef8abdcd50.slice/crio-602550a9f23e80a48e0b60eca195b3abb6fe5daa8d83e153301c99872cf5cd0c WatchSource:0}: Error finding container 602550a9f23e80a48e0b60eca195b3abb6fe5daa8d83e153301c99872cf5cd0c: Status 404 returned error can't find the container with id 602550a9f23e80a48e0b60eca195b3abb6fe5daa8d83e153301c99872cf5cd0c Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.553954 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-rbf5d"] Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.559604 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.565310 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-rbf5d"] Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.606949 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50","Type":"ContainerStarted","Data":"602550a9f23e80a48e0b60eca195b3abb6fe5daa8d83e153301c99872cf5cd0c"} Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.609980 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b" exitCode=0 Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.610027 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b"} Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.610060 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"} Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.610079 5002 scope.go:117] "RemoveContainer" containerID="3813767c58f0216e0a89c487483aeab6718470c85991b4c372bde6f9a19dc7b5" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.657708 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72mkj\" (UniqueName: \"kubernetes.io/projected/6acc2704-72e1-4467-932f-8cb49c2eb422-kube-api-access-72mkj\") pod \"aodh-db-create-rbf5d\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") " pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.658113 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc2704-72e1-4467-932f-8cb49c2eb422-operator-scripts\") pod \"aodh-db-create-rbf5d\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") " pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.760526 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc2704-72e1-4467-932f-8cb49c2eb422-operator-scripts\") pod \"aodh-db-create-rbf5d\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") " pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.760709 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72mkj\" (UniqueName: \"kubernetes.io/projected/6acc2704-72e1-4467-932f-8cb49c2eb422-kube-api-access-72mkj\") pod \"aodh-db-create-rbf5d\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") " pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.761637 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc2704-72e1-4467-932f-8cb49c2eb422-operator-scripts\") pod \"aodh-db-create-rbf5d\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") " 
pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.765800 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-6a1c-account-create-update-5dx2j"] Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.767167 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.773184 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.791680 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72mkj\" (UniqueName: \"kubernetes.io/projected/6acc2704-72e1-4467-932f-8cb49c2eb422-kube-api-access-72mkj\") pod \"aodh-db-create-rbf5d\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") " pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.807857 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-6a1c-account-create-update-5dx2j"] Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.864106 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74bd124f-de70-4274-86be-640d56813b9f-operator-scripts\") pod \"aodh-6a1c-account-create-update-5dx2j\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") " pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.864179 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkzrm\" (UniqueName: \"kubernetes.io/projected/74bd124f-de70-4274-86be-640d56813b9f-kube-api-access-rkzrm\") pod \"aodh-6a1c-account-create-update-5dx2j\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") " pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.897441 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-rbf5d" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.966259 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74bd124f-de70-4274-86be-640d56813b9f-operator-scripts\") pod \"aodh-6a1c-account-create-update-5dx2j\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") " pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.966455 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkzrm\" (UniqueName: \"kubernetes.io/projected/74bd124f-de70-4274-86be-640d56813b9f-kube-api-access-rkzrm\") pod \"aodh-6a1c-account-create-update-5dx2j\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") " pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.969565 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74bd124f-de70-4274-86be-640d56813b9f-operator-scripts\") pod \"aodh-6a1c-account-create-update-5dx2j\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") " pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:51 crc kubenswrapper[5002]: I1203 18:19:51.992475 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkzrm\" (UniqueName: \"kubernetes.io/projected/74bd124f-de70-4274-86be-640d56813b9f-kube-api-access-rkzrm\") pod \"aodh-6a1c-account-create-update-5dx2j\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") " pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.164627 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-6a1c-account-create-update-5dx2j" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.198695 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.202209 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="prometheus" containerID="cri-o://2266fb3b93c4dabc7695b805f1d4f9d9e815dcbf0fc8c0c31007216c777cdabf" gracePeriod=600 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.202263 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="thanos-sidecar" containerID="cri-o://21b99bcd07b23d0ec26b14a0fe9bfa20aa11d3a53142ec762efa533c392f7503" gracePeriod=600 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.202308 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="config-reloader" containerID="cri-o://8f04c990454a7143628a57e3e72d2e85fae5b4d1ab4884e346fe28e01618d31e" gracePeriod=600 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.621510 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-rbf5d"] Dec 03 18:19:52 crc kubenswrapper[5002]: W1203 18:19:52.641853 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6acc2704_72e1_4467_932f_8cb49c2eb422.slice/crio-22c6d4a58c7fd2572478efae8d9ea5f50f0d0f8f46a0843b7b4d2de64e516f7d WatchSource:0}: Error finding container 22c6d4a58c7fd2572478efae8d9ea5f50f0d0f8f46a0843b7b4d2de64e516f7d: Status 404 returned error can't find the container with id 22c6d4a58c7fd2572478efae8d9ea5f50f0d0f8f46a0843b7b4d2de64e516f7d Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.643864 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.1.139:9090/-/ready\": dial tcp 10.217.1.139:9090: connect: connection refused" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.666876 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50","Type":"ContainerStarted","Data":"d2bc65589f3e50f11b3d34d864f5b1e68afb25fc32f6337f053e30a9fbcb8b99"} Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.674542 5002 generic.go:334] "Generic (PLEG): container finished" podID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerID="21b99bcd07b23d0ec26b14a0fe9bfa20aa11d3a53142ec762efa533c392f7503" exitCode=0 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.674571 5002 generic.go:334] "Generic (PLEG): container finished" podID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerID="8f04c990454a7143628a57e3e72d2e85fae5b4d1ab4884e346fe28e01618d31e" exitCode=0 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.674579 5002 generic.go:334] "Generic (PLEG): container finished" podID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerID="2266fb3b93c4dabc7695b805f1d4f9d9e815dcbf0fc8c0c31007216c777cdabf" exitCode=0 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.674606 5002 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerDied","Data":"21b99bcd07b23d0ec26b14a0fe9bfa20aa11d3a53142ec762efa533c392f7503"} Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.674646 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerDied","Data":"8f04c990454a7143628a57e3e72d2e85fae5b4d1ab4884e346fe28e01618d31e"} Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.674672 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerDied","Data":"2266fb3b93c4dabc7695b805f1d4f9d9e815dcbf0fc8c0c31007216c777cdabf"} Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.692872 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.692853856 podStartE2EDuration="2.692853856s" podCreationTimestamp="2025-12-03 18:19:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:19:52.683774922 +0000 UTC m=+6516.097596830" watchObservedRunningTime="2025-12-03 18:19:52.692853856 +0000 UTC m=+6516.106675734" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.702265 5002 generic.go:334] "Generic (PLEG): container finished" podID="ca22aa38-32ed-4e21-aa8b-e0185634506d" containerID="d70907bc3bde979acd4451a27bc2559c5044f813c594ca4d4641eb6375b7ef80" exitCode=137 Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.702329 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a589f032c62c0cc344dd22930ae4572bd450e73b7ce0aa0c52deeb4ced1f45a" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.714646 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.781103 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-combined-ca-bundle\") pod \"ca22aa38-32ed-4e21-aa8b-e0185634506d\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.781177 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config-secret\") pod \"ca22aa38-32ed-4e21-aa8b-e0185634506d\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.781219 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdm7b\" (UniqueName: \"kubernetes.io/projected/ca22aa38-32ed-4e21-aa8b-e0185634506d-kube-api-access-cdm7b\") pod \"ca22aa38-32ed-4e21-aa8b-e0185634506d\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.781424 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config\") pod \"ca22aa38-32ed-4e21-aa8b-e0185634506d\" (UID: \"ca22aa38-32ed-4e21-aa8b-e0185634506d\") " Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.791684 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca22aa38-32ed-4e21-aa8b-e0185634506d-kube-api-access-cdm7b" (OuterVolumeSpecName: "kube-api-access-cdm7b") pod "ca22aa38-32ed-4e21-aa8b-e0185634506d" (UID: "ca22aa38-32ed-4e21-aa8b-e0185634506d"). InnerVolumeSpecName "kube-api-access-cdm7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.828324 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "ca22aa38-32ed-4e21-aa8b-e0185634506d" (UID: "ca22aa38-32ed-4e21-aa8b-e0185634506d"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.843152 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca22aa38-32ed-4e21-aa8b-e0185634506d" (UID: "ca22aa38-32ed-4e21-aa8b-e0185634506d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.883821 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.884340 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdm7b\" (UniqueName: \"kubernetes.io/projected/ca22aa38-32ed-4e21-aa8b-e0185634506d-kube-api-access-cdm7b\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.884510 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.893364 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-6a1c-account-create-update-5dx2j"] Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.946508 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "ca22aa38-32ed-4e21-aa8b-e0185634506d" (UID: "ca22aa38-32ed-4e21-aa8b-e0185634506d"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:52 crc kubenswrapper[5002]: I1203 18:19:52.988003 5002 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ca22aa38-32ed-4e21-aa8b-e0185634506d-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.271675 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403078 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-config\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403483 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-thanos-prometheus-http-client-file\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403538 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsqbg\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-kube-api-access-bsqbg\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403653 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-web-config\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403784 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/446440c7-e1a5-453c-b350-afee8d3a1f80-config-out\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403837 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/446440c7-e1a5-453c-b350-afee8d3a1f80-prometheus-metric-storage-rulefiles-0\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.403875 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-tls-assets\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.404076 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"446440c7-e1a5-453c-b350-afee8d3a1f80\" (UID: \"446440c7-e1a5-453c-b350-afee8d3a1f80\") " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.406999 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/446440c7-e1a5-453c-b350-afee8d3a1f80-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.409902 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.411018 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/446440c7-e1a5-453c-b350-afee8d3a1f80-config-out" (OuterVolumeSpecName: "config-out") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.411093 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-kube-api-access-bsqbg" (OuterVolumeSpecName: "kube-api-access-bsqbg") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "kube-api-access-bsqbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.412914 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-config" (OuterVolumeSpecName: "config") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.413845 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.473059 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-web-config" (OuterVolumeSpecName: "web-config") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.488802 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "446440c7-e1a5-453c-b350-afee8d3a1f80" (UID: "446440c7-e1a5-453c-b350-afee8d3a1f80"). InnerVolumeSpecName "pvc-5deacb4a-08c0-498d-a561-e37ea73e047d". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.507939 5002 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.507985 5002 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.508001 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsqbg\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-kube-api-access-bsqbg\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.508015 5002 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/446440c7-e1a5-453c-b350-afee8d3a1f80-web-config\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.508031 5002 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/446440c7-e1a5-453c-b350-afee8d3a1f80-config-out\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.508042 5002 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/446440c7-e1a5-453c-b350-afee8d3a1f80-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.508054 5002 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/446440c7-e1a5-453c-b350-afee8d3a1f80-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.508100 5002 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") on node \"crc\" " Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.578148 5002 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.578328 5002 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-5deacb4a-08c0-498d-a561-e37ea73e047d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d") on node "crc"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.613323 5002 reconciler_common.go:293] "Volume detached for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") on node \"crc\" DevicePath \"\""
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.714506 5002 generic.go:334] "Generic (PLEG): container finished" podID="74bd124f-de70-4274-86be-640d56813b9f" containerID="aa3a6248bb956a638899d6e4144dc1d07e694f900414fd37cf9ac8ae3cb05e93" exitCode=0
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.714646 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-6a1c-account-create-update-5dx2j" event={"ID":"74bd124f-de70-4274-86be-640d56813b9f","Type":"ContainerDied","Data":"aa3a6248bb956a638899d6e4144dc1d07e694f900414fd37cf9ac8ae3cb05e93"}
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.714691 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-6a1c-account-create-update-5dx2j" event={"ID":"74bd124f-de70-4274-86be-640d56813b9f","Type":"ContainerStarted","Data":"47af811141b54098e24987bb2b0dc7568551eb820451f53c7df097be832ded2a"}
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.717028 5002 generic.go:334] "Generic (PLEG): container finished" podID="6acc2704-72e1-4467-932f-8cb49c2eb422" containerID="c2c4ef341a19b1f5fd7c1a6c2ced9a6699f6b383ee6a8b4cb7e71dec25437775" exitCode=0
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.717081 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-rbf5d" event={"ID":"6acc2704-72e1-4467-932f-8cb49c2eb422","Type":"ContainerDied","Data":"c2c4ef341a19b1f5fd7c1a6c2ced9a6699f6b383ee6a8b4cb7e71dec25437775"}
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.717101 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-rbf5d" event={"ID":"6acc2704-72e1-4467-932f-8cb49c2eb422","Type":"ContainerStarted","Data":"22c6d4a58c7fd2572478efae8d9ea5f50f0d0f8f46a0843b7b4d2de64e516f7d"}
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.720916 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.721952 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"446440c7-e1a5-453c-b350-afee8d3a1f80","Type":"ContainerDied","Data":"fbc8c813926e57e5e6388302e6355490d29f0cb042ad31bed4f995752b095029"}
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.721994 5002 scope.go:117] "RemoveContainer" containerID="21b99bcd07b23d0ec26b14a0fe9bfa20aa11d3a53142ec762efa533c392f7503"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.722138 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.766827 5002 scope.go:117] "RemoveContainer" containerID="8f04c990454a7143628a57e3e72d2e85fae5b4d1ab4884e346fe28e01618d31e"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.804002 5002 scope.go:117] "RemoveContainer" containerID="2266fb3b93c4dabc7695b805f1d4f9d9e815dcbf0fc8c0c31007216c777cdabf"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.810260 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.824365 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.841675 5002 scope.go:117] "RemoveContainer" containerID="1f7dc2610301b6e792e5fddb16de125f76b248db56cf4dd6b96a583acd72282b"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.844959 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 18:19:53 crc kubenswrapper[5002]: E1203 18:19:53.845614 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="init-config-reloader"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.845632 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="init-config-reloader"
Dec 03 18:19:53 crc kubenswrapper[5002]: E1203 18:19:53.845651 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="prometheus"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.845659 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="prometheus"
Dec 03 18:19:53 crc kubenswrapper[5002]: E1203 18:19:53.845684 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="config-reloader"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.845692 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="config-reloader"
Dec 03 18:19:53 crc kubenswrapper[5002]: E1203 18:19:53.845705 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="thanos-sidecar"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.845713 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="thanos-sidecar"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.846010 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="prometheus"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.846033 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="thanos-sidecar"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.846054 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" containerName="config-reloader"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.850226 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.852929 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.853998 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.854605 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-9l22p"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.854865 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.855072 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.861091 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.862051 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 18:19:53 crc kubenswrapper[5002]: I1203 18:19:53.863043 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.021450 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.021963 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022191 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022322 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022426 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022603 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022708 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aad2275f-1879-49ef-b51f-79efc4cc39bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022789 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022859 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjv2m\" (UniqueName: \"kubernetes.io/projected/aad2275f-1879-49ef-b51f-79efc4cc39bb-kube-api-access-bjv2m\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.022907 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/aad2275f-1879-49ef-b51f-79efc4cc39bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.023062 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aad2275f-1879-49ef-b51f-79efc4cc39bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125315 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125377 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125557 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125621 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125648 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125692 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.125735 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aad2275f-1879-49ef-b51f-79efc4cc39bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.126016 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.126054 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjv2m\" (UniqueName: \"kubernetes.io/projected/aad2275f-1879-49ef-b51f-79efc4cc39bb-kube-api-access-bjv2m\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.126081 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/aad2275f-1879-49ef-b51f-79efc4cc39bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.126118 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aad2275f-1879-49ef-b51f-79efc4cc39bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.127372 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/aad2275f-1879-49ef-b51f-79efc4cc39bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.129809 5002 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.129844 5002 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/94371cf951ef3da22c6d29514db5618c55bcf638f61084fc1b944304ccc42026/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.132154 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.132164 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.132519 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aad2275f-1879-49ef-b51f-79efc4cc39bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.132892 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aad2275f-1879-49ef-b51f-79efc4cc39bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.133212 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.134418 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.134738 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.135873 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/aad2275f-1879-49ef-b51f-79efc4cc39bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.143207 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjv2m\" (UniqueName: \"kubernetes.io/projected/aad2275f-1879-49ef-b51f-79efc4cc39bb-kube-api-access-bjv2m\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.205966 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5deacb4a-08c0-498d-a561-e37ea73e047d\") pod \"prometheus-metric-storage-0\" (UID: \"aad2275f-1879-49ef-b51f-79efc4cc39bb\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.228497 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.824909 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.852738 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="446440c7-e1a5-453c-b350-afee8d3a1f80" path="/var/lib/kubelet/pods/446440c7-e1a5-453c-b350-afee8d3a1f80/volumes"
Dec 03 18:19:54 crc kubenswrapper[5002]: I1203 18:19:54.853680 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca22aa38-32ed-4e21-aa8b-e0185634506d" path="/var/lib/kubelet/pods/ca22aa38-32ed-4e21-aa8b-e0185634506d/volumes"
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.096192 5002 util.go:48] "No ready sandbox for pod can be found.
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.147849 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74bd124f-de70-4274-86be-640d56813b9f-operator-scripts\") pod \"74bd124f-de70-4274-86be-640d56813b9f\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") "
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.148233 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkzrm\" (UniqueName: \"kubernetes.io/projected/74bd124f-de70-4274-86be-640d56813b9f-kube-api-access-rkzrm\") pod \"74bd124f-de70-4274-86be-640d56813b9f\" (UID: \"74bd124f-de70-4274-86be-640d56813b9f\") "
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.148580 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74bd124f-de70-4274-86be-640d56813b9f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "74bd124f-de70-4274-86be-640d56813b9f" (UID: "74bd124f-de70-4274-86be-640d56813b9f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.148861 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74bd124f-de70-4274-86be-640d56813b9f-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.153162 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74bd124f-de70-4274-86be-640d56813b9f-kube-api-access-rkzrm" (OuterVolumeSpecName: "kube-api-access-rkzrm") pod "74bd124f-de70-4274-86be-640d56813b9f" (UID: "74bd124f-de70-4274-86be-640d56813b9f"). InnerVolumeSpecName "kube-api-access-rkzrm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.230171 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-rbf5d"
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.251347 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkzrm\" (UniqueName: \"kubernetes.io/projected/74bd124f-de70-4274-86be-640d56813b9f-kube-api-access-rkzrm\") on node \"crc\" DevicePath \"\""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.352712 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72mkj\" (UniqueName: \"kubernetes.io/projected/6acc2704-72e1-4467-932f-8cb49c2eb422-kube-api-access-72mkj\") pod \"6acc2704-72e1-4467-932f-8cb49c2eb422\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") "
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.352944 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc2704-72e1-4467-932f-8cb49c2eb422-operator-scripts\") pod \"6acc2704-72e1-4467-932f-8cb49c2eb422\" (UID: \"6acc2704-72e1-4467-932f-8cb49c2eb422\") "
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.353398 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6acc2704-72e1-4467-932f-8cb49c2eb422-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6acc2704-72e1-4467-932f-8cb49c2eb422" (UID: "6acc2704-72e1-4467-932f-8cb49c2eb422"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.353651 5002 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc2704-72e1-4467-932f-8cb49c2eb422-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.359033 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6acc2704-72e1-4467-932f-8cb49c2eb422-kube-api-access-72mkj" (OuterVolumeSpecName: "kube-api-access-72mkj") pod "6acc2704-72e1-4467-932f-8cb49c2eb422" (UID: "6acc2704-72e1-4467-932f-8cb49c2eb422"). InnerVolumeSpecName "kube-api-access-72mkj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.455653 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72mkj\" (UniqueName: \"kubernetes.io/projected/6acc2704-72e1-4467-932f-8cb49c2eb422-kube-api-access-72mkj\") on node \"crc\" DevicePath \"\""
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.739635 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-6a1c-account-create-update-5dx2j" event={"ID":"74bd124f-de70-4274-86be-640d56813b9f","Type":"ContainerDied","Data":"47af811141b54098e24987bb2b0dc7568551eb820451f53c7df097be832ded2a"}
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.739675 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47af811141b54098e24987bb2b0dc7568551eb820451f53c7df097be832ded2a"
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.739724 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-6a1c-account-create-update-5dx2j"
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.754040 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-rbf5d" event={"ID":"6acc2704-72e1-4467-932f-8cb49c2eb422","Type":"ContainerDied","Data":"22c6d4a58c7fd2572478efae8d9ea5f50f0d0f8f46a0843b7b4d2de64e516f7d"}
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.754090 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22c6d4a58c7fd2572478efae8d9ea5f50f0d0f8f46a0843b7b4d2de64e516f7d"
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.754147 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-rbf5d"
Dec 03 18:19:55 crc kubenswrapper[5002]: I1203 18:19:55.772863 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aad2275f-1879-49ef-b51f-79efc4cc39bb","Type":"ContainerStarted","Data":"0d85ae176e8b6a777d56cb9d81345e1c0925007a195223faf3259b34729ab416"}
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.093821 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-4ndlh"]
Dec 03 18:19:57 crc kubenswrapper[5002]: E1203 18:19:57.094896 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6acc2704-72e1-4467-932f-8cb49c2eb422" containerName="mariadb-database-create"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.094913 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="6acc2704-72e1-4467-932f-8cb49c2eb422" containerName="mariadb-database-create"
Dec 03 18:19:57 crc kubenswrapper[5002]: E1203 18:19:57.094945 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74bd124f-de70-4274-86be-640d56813b9f" containerName="mariadb-account-create-update"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.094954 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="74bd124f-de70-4274-86be-640d56813b9f" containerName="mariadb-account-create-update"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.095220 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="74bd124f-de70-4274-86be-640d56813b9f" containerName="mariadb-account-create-update"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.095244 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="6acc2704-72e1-4467-932f-8cb49c2eb422" containerName="mariadb-database-create"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.096160 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.100723 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-zznvp"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.100946 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.101069 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.101408 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.107170 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-4ndlh"]
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.191804 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-scripts\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.191856 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-combined-ca-bundle\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.192054 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-config-data\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.192200 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szv7z\" (UniqueName: \"kubernetes.io/projected/be1e8c59-f401-4ea7-aada-c65cb303729a-kube-api-access-szv7z\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.295006 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-config-data\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.295088 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szv7z\" (UniqueName: \"kubernetes.io/projected/be1e8c59-f401-4ea7-aada-c65cb303729a-kube-api-access-szv7z\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.295266 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-scripts\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.295333 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-combined-ca-bundle\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.302245 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-config-data\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.302550 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-scripts\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.314303 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szv7z\" (UniqueName: \"kubernetes.io/projected/be1e8c59-f401-4ea7-aada-c65cb303729a-kube-api-access-szv7z\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.315284 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-combined-ca-bundle\") pod \"aodh-db-sync-4ndlh\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") " pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.425479 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:19:57 crc kubenswrapper[5002]: I1203 18:19:57.927475 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-4ndlh"]
Dec 03 18:19:58 crc kubenswrapper[5002]: W1203 18:19:58.044878 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe1e8c59_f401_4ea7_aada_c65cb303729a.slice/crio-61b48f8f004a7c80cac7f81dba2c397518534f6f15bf5770c7971e1d0023c43a WatchSource:0}: Error finding container 61b48f8f004a7c80cac7f81dba2c397518534f6f15bf5770c7971e1d0023c43a: Status 404 returned error can't find the container with id 61b48f8f004a7c80cac7f81dba2c397518534f6f15bf5770c7971e1d0023c43a
Dec 03 18:19:58 crc kubenswrapper[5002]: I1203 18:19:58.805690 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4ndlh" event={"ID":"be1e8c59-f401-4ea7-aada-c65cb303729a","Type":"ContainerStarted","Data":"61b48f8f004a7c80cac7f81dba2c397518534f6f15bf5770c7971e1d0023c43a"}
Dec 03 18:19:58 crc kubenswrapper[5002]: I1203 18:19:58.818972 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aad2275f-1879-49ef-b51f-79efc4cc39bb","Type":"ContainerStarted","Data":"b66e5e24200fb185554ec25be4ea83d911b4edfa15ee66f1daf36bee081befd9"}
Dec 03 18:20:02 crc kubenswrapper[5002]: I1203 18:20:02.859836 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4ndlh" event={"ID":"be1e8c59-f401-4ea7-aada-c65cb303729a","Type":"ContainerStarted","Data":"d9dbe9eea4ececcb0db6edaf653d16331c309e51d7e2377db15889a711d36c5d"}
Dec 03 18:20:02 crc kubenswrapper[5002]: I1203 18:20:02.877959 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-4ndlh" podStartSLOduration=1.360414106 podStartE2EDuration="5.877942516s" podCreationTimestamp="2025-12-03 18:19:57 +0000 UTC" firstStartedPulling="2025-12-03 18:19:58.049326426 +0000 UTC m=+6521.463148314" lastFinishedPulling="2025-12-03 18:20:02.566854836 +0000 UTC m=+6525.980676724" observedRunningTime="2025-12-03 18:20:02.87478265 +0000 UTC m=+6526.288604538" watchObservedRunningTime="2025-12-03 18:20:02.877942516 +0000 UTC m=+6526.291764404"
Dec 03 18:20:04 crc kubenswrapper[5002]: I1203 18:20:04.882782 5002 generic.go:334] "Generic (PLEG): container finished" podID="be1e8c59-f401-4ea7-aada-c65cb303729a" containerID="d9dbe9eea4ececcb0db6edaf653d16331c309e51d7e2377db15889a711d36c5d" exitCode=0
Dec 03 18:20:04 crc kubenswrapper[5002]: I1203 18:20:04.882831 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4ndlh" event={"ID":"be1e8c59-f401-4ea7-aada-c65cb303729a","Type":"ContainerDied","Data":"d9dbe9eea4ececcb0db6edaf653d16331c309e51d7e2377db15889a711d36c5d"}
Dec 03 18:20:05 crc kubenswrapper[5002]: I1203 18:20:05.893244 5002 generic.go:334] "Generic (PLEG): container finished" podID="aad2275f-1879-49ef-b51f-79efc4cc39bb" containerID="b66e5e24200fb185554ec25be4ea83d911b4edfa15ee66f1daf36bee081befd9" exitCode=0
Dec 03 18:20:05 crc kubenswrapper[5002]: I1203 18:20:05.893332 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aad2275f-1879-49ef-b51f-79efc4cc39bb","Type":"ContainerDied","Data":"b66e5e24200fb185554ec25be4ea83d911b4edfa15ee66f1daf36bee081befd9"}
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.354257 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.396393 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-scripts\") pod \"be1e8c59-f401-4ea7-aada-c65cb303729a\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") "
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.396452 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-config-data\") pod \"be1e8c59-f401-4ea7-aada-c65cb303729a\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") "
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.396485 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szv7z\" (UniqueName: \"kubernetes.io/projected/be1e8c59-f401-4ea7-aada-c65cb303729a-kube-api-access-szv7z\") pod \"be1e8c59-f401-4ea7-aada-c65cb303729a\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") "
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.397716 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-combined-ca-bundle\") pod \"be1e8c59-f401-4ea7-aada-c65cb303729a\" (UID: \"be1e8c59-f401-4ea7-aada-c65cb303729a\") "
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.401813 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-scripts" (OuterVolumeSpecName: "scripts") pod "be1e8c59-f401-4ea7-aada-c65cb303729a" (UID: "be1e8c59-f401-4ea7-aada-c65cb303729a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.404260 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be1e8c59-f401-4ea7-aada-c65cb303729a-kube-api-access-szv7z" (OuterVolumeSpecName: "kube-api-access-szv7z") pod "be1e8c59-f401-4ea7-aada-c65cb303729a" (UID: "be1e8c59-f401-4ea7-aada-c65cb303729a"). InnerVolumeSpecName "kube-api-access-szv7z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.429912 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-config-data" (OuterVolumeSpecName: "config-data") pod "be1e8c59-f401-4ea7-aada-c65cb303729a" (UID: "be1e8c59-f401-4ea7-aada-c65cb303729a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.435946 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be1e8c59-f401-4ea7-aada-c65cb303729a" (UID: "be1e8c59-f401-4ea7-aada-c65cb303729a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.500548 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.500581 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.500591 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be1e8c59-f401-4ea7-aada-c65cb303729a-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.500600 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szv7z\" (UniqueName: \"kubernetes.io/projected/be1e8c59-f401-4ea7-aada-c65cb303729a-kube-api-access-szv7z\") on node \"crc\" DevicePath \"\""
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.913051 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-4ndlh" event={"ID":"be1e8c59-f401-4ea7-aada-c65cb303729a","Type":"ContainerDied","Data":"61b48f8f004a7c80cac7f81dba2c397518534f6f15bf5770c7971e1d0023c43a"}
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.913378 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61b48f8f004a7c80cac7f81dba2c397518534f6f15bf5770c7971e1d0023c43a"
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.913469 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-4ndlh"
Dec 03 18:20:06 crc kubenswrapper[5002]: I1203 18:20:06.937917 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aad2275f-1879-49ef-b51f-79efc4cc39bb","Type":"ContainerStarted","Data":"574b1e743996d85a5b65a5b697d8fce094cc2cbdfbd58fc2ba6f3f37a491676e"}
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.170955 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"]
Dec 03 18:20:07 crc kubenswrapper[5002]: E1203 18:20:07.171618 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be1e8c59-f401-4ea7-aada-c65cb303729a" containerName="aodh-db-sync"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.171634 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="be1e8c59-f401-4ea7-aada-c65cb303729a" containerName="aodh-db-sync"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.171932 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="be1e8c59-f401-4ea7-aada-c65cb303729a" containerName="aodh-db-sync"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.178844 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.182807 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-zznvp"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.183109 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.196893 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.210631 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.215942 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dntf\" (UniqueName: \"kubernetes.io/projected/9bb398a0-a1a6-4f3a-98b2-5990a780a670-kube-api-access-2dntf\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.216011 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-scripts\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.216131 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-combined-ca-bundle\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.216154 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-config-data\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.319364 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-combined-ca-bundle\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.319413 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-config-data\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.319478 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dntf\" (UniqueName: \"kubernetes.io/projected/9bb398a0-a1a6-4f3a-98b2-5990a780a670-kube-api-access-2dntf\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.319519 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-scripts\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.327803 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-config-data\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.329187 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-combined-ca-bundle\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.338238 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-scripts\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.341461 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dntf\" (UniqueName: \"kubernetes.io/projected/9bb398a0-a1a6-4f3a-98b2-5990a780a670-kube-api-access-2dntf\") pod \"aodh-0\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " pod="openstack/aodh-0"
Dec 03 18:20:07 crc kubenswrapper[5002]: I1203 18:20:07.526126 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Dec 03 18:20:08 crc kubenswrapper[5002]: I1203 18:20:08.059659 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Dec 03 18:20:08 crc kubenswrapper[5002]: I1203 18:20:08.957312 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerStarted","Data":"b8fef8ec3fb89744fb6bb70e60dfc7aab3dbbd2e9578cf1f8e8e8e85104bf20b"}
Dec 03 18:20:08 crc kubenswrapper[5002]: I1203 18:20:08.957577 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerStarted","Data":"f7376ffe2bd608c500c624c6a163e824f4b990a73227e3a6a2f6b09d60d77bff"}
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.518644 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.523084 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-central-agent" containerID="cri-o://f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40" gracePeriod=30
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.523147 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="proxy-httpd" containerID="cri-o://3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893" gracePeriod=30
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.523167 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="sg-core" containerID="cri-o://634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7" gracePeriod=30
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.523182 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-notification-agent" containerID="cri-o://a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43" gracePeriod=30
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.897154 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.1.140:3000/\": read tcp 10.217.0.2:42270->10.217.1.140:3000: read: connection reset by peer"
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.984243 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aad2275f-1879-49ef-b51f-79efc4cc39bb","Type":"ContainerStarted","Data":"2da4fc5a82b55160078fe6952439da14b1007e63e30a3e9e0892e54da11a20c2"}
Dec 03 18:20:09 crc kubenswrapper[5002]: I1203 18:20:09.984291 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aad2275f-1879-49ef-b51f-79efc4cc39bb","Type":"ContainerStarted","Data":"88bf369b932e5afd71cb006362cb3183e80a89ba57fe3ad9c268c7a0eada20db"}
Dec 03 18:20:10 crc kubenswrapper[5002]: I1203 18:20:10.002259 5002 generic.go:334] "Generic (PLEG): container finished" podID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerID="3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893" exitCode=0
Dec 03 18:20:10 crc kubenswrapper[5002]: I1203 18:20:10.002294 5002 generic.go:334] "Generic (PLEG): container finished" podID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerID="634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7" exitCode=2
Dec 03 18:20:10 crc kubenswrapper[5002]: I1203 18:20:10.002319 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerDied","Data":"3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893"}
Dec 03 18:20:10 crc kubenswrapper[5002]: I1203 18:20:10.002352 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerDied","Data":"634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7"}
Dec 03 18:20:10 crc kubenswrapper[5002]: I1203 18:20:10.017414 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=17.017394348 podStartE2EDuration="17.017394348s" podCreationTimestamp="2025-12-03 18:19:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 18:20:10.016457303 +0000 UTC m=+6533.430279191" watchObservedRunningTime="2025-12-03 18:20:10.017394348 +0000 UTC m=+6533.431216236"
Dec 03 18:20:10 crc kubenswrapper[5002]: I1203 18:20:10.579652 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"]
Dec 03 18:20:11 crc kubenswrapper[5002]: I1203 18:20:11.015573 5002 generic.go:334] "Generic (PLEG): container finished" podID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerID="f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40" exitCode=0
Dec 03 18:20:11 crc kubenswrapper[5002]: I1203 18:20:11.015654 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerDied","Data":"f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40"}
Dec 03 18:20:11 crc kubenswrapper[5002]: I1203 18:20:11.018066 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerStarted","Data":"50db02e4e0d5609acd17c000564079f46f10defd0e01396cf02ee31c4f20f772"}
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.587151 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.652786 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4wxt\" (UniqueName: \"kubernetes.io/projected/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-kube-api-access-h4wxt\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.652842 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-log-httpd\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.652877 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-sg-core-conf-yaml\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.652916 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-scripts\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653048 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-combined-ca-bundle\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653137 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-config-data\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653202 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-run-httpd\") pod \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\" (UID: \"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f\") "
Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653415 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653729 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653954 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.653967 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.661041 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-kube-api-access-h4wxt" (OuterVolumeSpecName: "kube-api-access-h4wxt") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "kube-api-access-h4wxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.661131 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-scripts" (OuterVolumeSpecName: "scripts") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.688649 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.747848 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.759202 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4wxt\" (UniqueName: \"kubernetes.io/projected/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-kube-api-access-h4wxt\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.759235 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.759244 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.759253 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.783426 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-config-data" (OuterVolumeSpecName: "config-data") pod "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" (UID: "bf88da24-95ba-4fba-88ed-2f8f85fa5e3f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:12 crc kubenswrapper[5002]: I1203 18:20:12.861156 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.068507 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerStarted","Data":"b3e2e0a08bfbd1a4f22bd888cff5cd1ac1309c8c7bf4b099981b8d56184adf9c"} Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.076807 5002 generic.go:334] "Generic (PLEG): container finished" podID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerID="a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43" exitCode=0 Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.076853 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerDied","Data":"a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43"} Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.076887 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf88da24-95ba-4fba-88ed-2f8f85fa5e3f","Type":"ContainerDied","Data":"a3a1adff309cd776a3fdcab84322b28deaba83836a34a6b2e3dafae00cc9941e"} Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.076905 5002 scope.go:117] "RemoveContainer" containerID="3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.077122 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.188182 5002 scope.go:117] "RemoveContainer" containerID="634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.191971 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.219731 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.231059 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.231612 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-notification-agent" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.231639 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-notification-agent" Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.231674 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="proxy-httpd" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.231685 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="proxy-httpd" Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.231699 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="sg-core" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.231709 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="sg-core" Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.231772 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-central-agent" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.231782 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-central-agent" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.232003 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-notification-agent" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.232039 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="proxy-httpd" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.232056 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="ceilometer-central-agent" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.232072 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="sg-core" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.234236 5002 util.go:30] "No sandbox for pod can be found. 
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.236622 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.236956 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.257871 5002 scope.go:117] "RemoveContainer" containerID="a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.261063 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269147 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k5sg\" (UniqueName: \"kubernetes.io/projected/95957e23-df9c-4340-a03f-f0728f0fdb10-kube-api-access-2k5sg\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269206 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-run-httpd\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269261 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-config-data\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269351 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-log-httpd\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269406 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269521 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-scripts\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.269560 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.297872 5002 scope.go:117] "RemoveContainer" containerID="f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.317773 5002 scope.go:117] "RemoveContainer" containerID="3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893"
Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.319441 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893\": container with ID starting with 3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893 not found: ID does not exist" containerID="3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.319488 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893"} err="failed to get container status \"3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893\": rpc error: code = NotFound desc = could not find container \"3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893\": container with ID starting with 3c6eb427fbb6912e46fbc70eb35e5bc0d5849cebe96d2096ba724b3983d9b893 not found: ID does not exist"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.319515 5002 scope.go:117] "RemoveContainer" containerID="634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7"
Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.320054 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7\": container with ID starting with 634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7 not found: ID does not exist" containerID="634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.320117 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7"} err="failed to get container status \"634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7\": rpc error: code = NotFound desc = could not find container \"634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7\": container with ID starting with 634c3f28052109a7d218a3dbdfb80da0ea7de913bbdeaa5f3ed22e5b97b086a7 not found: ID does not exist"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.320146 5002 scope.go:117] "RemoveContainer" containerID="a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43"
Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.320493 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43\": container with ID starting with a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43 not found: ID does not exist" containerID="a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.320544 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43"} err="failed to get container status \"a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43\": rpc error: code = NotFound desc = could not find container \"a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43\": container with ID starting with a854ff135e3094500aebc027049de1c0522b2a0e69c5f54ae81380281345cb43 not found: ID does not exist"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.320580 5002 scope.go:117] "RemoveContainer" containerID="f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40"
Dec 03 18:20:13 crc kubenswrapper[5002]: E1203 18:20:13.320917 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40\": container with ID starting with f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40 not found: ID does not exist" containerID="f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.320952 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40"} err="failed to get container status \"f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40\": rpc error: code = NotFound desc = could not find container \"f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40\": container with ID starting with f80b500477d3a913b7a1b0acabee3edade4a57745d65bb92d54c611840007c40 not found: ID does not exist"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371604 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-scripts\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371671 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371791 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k5sg\" (UniqueName: \"kubernetes.io/projected/95957e23-df9c-4340-a03f-f0728f0fdb10-kube-api-access-2k5sg\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371826 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-run-httpd\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371867 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-config-data\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371933 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-log-httpd\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0"
Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.371980 5002 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.372679 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-log-httpd\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.372729 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-run-httpd\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.376150 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.377288 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-config-data\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.379769 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-scripts\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.385282 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.403994 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k5sg\" (UniqueName: \"kubernetes.io/projected/95957e23-df9c-4340-a03f-f0728f0fdb10-kube-api-access-2k5sg\") pod \"ceilometer-0\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " pod="openstack/ceilometer-0" Dec 03 18:20:13 crc kubenswrapper[5002]: I1203 18:20:13.568118 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 18:20:14 crc kubenswrapper[5002]: I1203 18:20:14.116926 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:14 crc kubenswrapper[5002]: W1203 18:20:14.148330 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95957e23_df9c_4340_a03f_f0728f0fdb10.slice/crio-218e1ad655fa9432dc6d2abd51247f89fc29ca9a10e465c3aa12fc3d80df4286 WatchSource:0}: Error finding container 218e1ad655fa9432dc6d2abd51247f89fc29ca9a10e465c3aa12fc3d80df4286: Status 404 returned error can't find the container with id 218e1ad655fa9432dc6d2abd51247f89fc29ca9a10e465c3aa12fc3d80df4286 Dec 03 18:20:14 crc kubenswrapper[5002]: I1203 18:20:14.230501 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 18:20:14 crc kubenswrapper[5002]: I1203 18:20:14.853432 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" path="/var/lib/kubelet/pods/bf88da24-95ba-4fba-88ed-2f8f85fa5e3f/volumes" Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.115791 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerStarted","Data":"071ea2ac06373127f93e8de1e8ff9fd7d6fc73004f3a49a0020e7309b41e0a62"} Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.116192 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-api" containerID="cri-o://b8fef8ec3fb89744fb6bb70e60dfc7aab3dbbd2e9578cf1f8e8e8e85104bf20b" gracePeriod=30 Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.118061 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-listener" containerID="cri-o://071ea2ac06373127f93e8de1e8ff9fd7d6fc73004f3a49a0020e7309b41e0a62" gracePeriod=30 Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.118129 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-notifier" containerID="cri-o://b3e2e0a08bfbd1a4f22bd888cff5cd1ac1309c8c7bf4b099981b8d56184adf9c" gracePeriod=30 Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.118182 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-evaluator" containerID="cri-o://50db02e4e0d5609acd17c000564079f46f10defd0e01396cf02ee31c4f20f772" gracePeriod=30 Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.127467 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerStarted","Data":"3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f"} Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.127516 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerStarted","Data":"218e1ad655fa9432dc6d2abd51247f89fc29ca9a10e465c3aa12fc3d80df4286"} Dec 03 18:20:15 crc kubenswrapper[5002]: I1203 18:20:15.150056 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/aodh-0" podStartSLOduration=2.05829402 podStartE2EDuration="8.150035548s" podCreationTimestamp="2025-12-03 18:20:07 +0000 UTC" firstStartedPulling="2025-12-03 18:20:08.060158671 +0000 UTC m=+6531.473980549" lastFinishedPulling="2025-12-03 18:20:14.151900169 +0000 UTC m=+6537.565722077" observedRunningTime="2025-12-03 18:20:15.139021922 +0000 UTC m=+6538.552843820" watchObservedRunningTime="2025-12-03 18:20:15.150035548 +0000 UTC m=+6538.563857436" Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.138544 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerStarted","Data":"4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48"} Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.141124 5002 generic.go:334] "Generic (PLEG): container finished" podID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerID="b3e2e0a08bfbd1a4f22bd888cff5cd1ac1309c8c7bf4b099981b8d56184adf9c" exitCode=0 Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.141149 5002 generic.go:334] "Generic (PLEG): container finished" podID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerID="50db02e4e0d5609acd17c000564079f46f10defd0e01396cf02ee31c4f20f772" exitCode=0 Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.141156 5002 generic.go:334] "Generic (PLEG): container finished" podID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerID="b8fef8ec3fb89744fb6bb70e60dfc7aab3dbbd2e9578cf1f8e8e8e85104bf20b" exitCode=0 Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.141180 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerDied","Data":"b3e2e0a08bfbd1a4f22bd888cff5cd1ac1309c8c7bf4b099981b8d56184adf9c"} Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.141207 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerDied","Data":"50db02e4e0d5609acd17c000564079f46f10defd0e01396cf02ee31c4f20f772"} Dec 03 18:20:16 crc kubenswrapper[5002]: I1203 18:20:16.141216 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerDied","Data":"b8fef8ec3fb89744fb6bb70e60dfc7aab3dbbd2e9578cf1f8e8e8e85104bf20b"} Dec 03 18:20:17 crc kubenswrapper[5002]: I1203 18:20:17.154423 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerStarted","Data":"8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76"} Dec 03 18:20:18 crc kubenswrapper[5002]: I1203 18:20:18.168038 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerStarted","Data":"7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f"} Dec 03 18:20:18 crc kubenswrapper[5002]: I1203 18:20:18.168376 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 18:20:18 crc kubenswrapper[5002]: I1203 18:20:18.210043 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8300513280000001 podStartE2EDuration="5.210016582s" podCreationTimestamp="2025-12-03 18:20:13 +0000 UTC" firstStartedPulling="2025-12-03 18:20:14.15567093 +0000 UTC m=+6537.569492818" 
lastFinishedPulling="2025-12-03 18:20:17.535636184 +0000 UTC m=+6540.949458072" observedRunningTime="2025-12-03 18:20:18.198576944 +0000 UTC m=+6541.612398932" watchObservedRunningTime="2025-12-03 18:20:18.210016582 +0000 UTC m=+6541.623838480" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.052157 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-ldc6d"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.068883 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-1875-account-create-update-fx5zd"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.081280 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-xhbnc"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.090366 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2e7f-account-create-update-hm9vx"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.098924 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9ba0-account-create-update-jxxpd"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.108198 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-2mw49"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.116717 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-2e7f-account-create-update-hm9vx"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.125547 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-ldc6d"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.157278 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-9ba0-account-create-update-jxxpd"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.169026 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-2mw49"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.180051 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-1875-account-create-update-fx5zd"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.188713 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-xhbnc"] Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.228882 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.237855 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.855909 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="055e8fd2-1d52-401b-acdd-c55bc568e4fa" path="/var/lib/kubelet/pods/055e8fd2-1d52-401b-acdd-c55bc568e4fa/volumes" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.857073 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13739dfc-aae8-4815-96e7-b59d41e09486" path="/var/lib/kubelet/pods/13739dfc-aae8-4815-96e7-b59d41e09486/volumes" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.858070 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a71c70e-3a49-4f5f-a891-145eea3e70b1" path="/var/lib/kubelet/pods/2a71c70e-3a49-4f5f-a891-145eea3e70b1/volumes" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.859093 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4448265e-9399-4ef2-9e41-bdba3fd2fe6a" path="/var/lib/kubelet/pods/4448265e-9399-4ef2-9e41-bdba3fd2fe6a/volumes" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.861442 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2569fc9-80e8-4c61-8513-8422d954e2de" path="/var/lib/kubelet/pods/c2569fc9-80e8-4c61-8513-8422d954e2de/volumes" Dec 03 18:20:24 crc kubenswrapper[5002]: I1203 18:20:24.862705 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1554d19-8306-4834-86f0-24395c08de62" path="/var/lib/kubelet/pods/e1554d19-8306-4834-86f0-24395c08de62/volumes" Dec 03 18:20:25 crc kubenswrapper[5002]: I1203 18:20:25.247350 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 03 18:20:34 crc kubenswrapper[5002]: I1203 18:20:34.038515 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5f9c"] Dec 03 18:20:34 crc kubenswrapper[5002]: I1203 18:20:34.049125 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5f9c"] Dec 03 18:20:34 crc kubenswrapper[5002]: I1203 18:20:34.852574 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952" path="/var/lib/kubelet/pods/d2fdc5c0-aa4d-4bb3-a9c7-8ed9d5808952/volumes" Dec 03 18:20:42 crc kubenswrapper[5002]: I1203 18:20:42.527842 5002 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="bf88da24-95ba-4fba-88ed-2f8f85fa5e3f" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.1.140:3000/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 18:20:43 crc kubenswrapper[5002]: I1203 18:20:43.574808 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.473002 5002 generic.go:334] "Generic (PLEG): container finished" podID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerID="071ea2ac06373127f93e8de1e8ff9fd7d6fc73004f3a49a0020e7309b41e0a62" exitCode=137 Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.473482 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerDied","Data":"071ea2ac06373127f93e8de1e8ff9fd7d6fc73004f3a49a0020e7309b41e0a62"} Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.661481 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.778456 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dntf\" (UniqueName: \"kubernetes.io/projected/9bb398a0-a1a6-4f3a-98b2-5990a780a670-kube-api-access-2dntf\") pod \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.778930 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-combined-ca-bundle\") pod \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.778953 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-config-data\") pod \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.779129 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-scripts\") pod \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\" (UID: \"9bb398a0-a1a6-4f3a-98b2-5990a780a670\") " Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.784537 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-scripts" (OuterVolumeSpecName: "scripts") pod "9bb398a0-a1a6-4f3a-98b2-5990a780a670" (UID: "9bb398a0-a1a6-4f3a-98b2-5990a780a670"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.784671 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bb398a0-a1a6-4f3a-98b2-5990a780a670-kube-api-access-2dntf" (OuterVolumeSpecName: "kube-api-access-2dntf") pod "9bb398a0-a1a6-4f3a-98b2-5990a780a670" (UID: "9bb398a0-a1a6-4f3a-98b2-5990a780a670"). InnerVolumeSpecName "kube-api-access-2dntf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.881353 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.881382 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dntf\" (UniqueName: \"kubernetes.io/projected/9bb398a0-a1a6-4f3a-98b2-5990a780a670-kube-api-access-2dntf\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.896930 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bb398a0-a1a6-4f3a-98b2-5990a780a670" (UID: "9bb398a0-a1a6-4f3a-98b2-5990a780a670"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.904887 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-config-data" (OuterVolumeSpecName: "config-data") pod "9bb398a0-a1a6-4f3a-98b2-5990a780a670" (UID: "9bb398a0-a1a6-4f3a-98b2-5990a780a670"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.983761 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:45 crc kubenswrapper[5002]: I1203 18:20:45.983981 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb398a0-a1a6-4f3a-98b2-5990a780a670-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.485500 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9bb398a0-a1a6-4f3a-98b2-5990a780a670","Type":"ContainerDied","Data":"f7376ffe2bd608c500c624c6a163e824f4b990a73227e3a6a2f6b09d60d77bff"} Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.485550 5002 scope.go:117] "RemoveContainer" containerID="071ea2ac06373127f93e8de1e8ff9fd7d6fc73004f3a49a0020e7309b41e0a62" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.485554 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.514473 5002 scope.go:117] "RemoveContainer" containerID="b3e2e0a08bfbd1a4f22bd888cff5cd1ac1309c8c7bf4b099981b8d56184adf9c" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.520859 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.548192 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.559269 5002 scope.go:117] "RemoveContainer" containerID="50db02e4e0d5609acd17c000564079f46f10defd0e01396cf02ee31c4f20f772" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.562383 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 03 18:20:46 crc kubenswrapper[5002]: E1203 18:20:46.562867 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-evaluator" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.562879 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-evaluator" Dec 03 18:20:46 crc kubenswrapper[5002]: E1203 18:20:46.562900 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-listener" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.562906 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-listener" Dec 03 18:20:46 crc kubenswrapper[5002]: E1203 18:20:46.562930 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-api" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.562935 5002 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-api" Dec 03 18:20:46 crc kubenswrapper[5002]: E1203 18:20:46.562954 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-notifier" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.562960 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-notifier" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.563139 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-evaluator" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.563161 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-api" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.563172 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-listener" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.563186 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" containerName="aodh-notifier" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.565664 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.569975 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.570205 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.570278 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.570393 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.570518 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-zznvp" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.572356 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.608927 5002 scope.go:117] "RemoveContainer" containerID="b8fef8ec3fb89744fb6bb70e60dfc7aab3dbbd2e9578cf1f8e8e8e85104bf20b" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.697662 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-public-tls-certs\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.697715 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-scripts\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.697741 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-internal-tls-certs\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.697806 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdr2w\" (UniqueName: \"kubernetes.io/projected/d767077e-2792-43d7-aedc-638ddd3adb65-kube-api-access-xdr2w\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.697870 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-combined-ca-bundle\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.697909 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-config-data\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.799786 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-combined-ca-bundle\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.799879 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-config-data\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.800008 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-public-tls-certs\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.800049 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-scripts\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.800079 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-internal-tls-certs\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.800113 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdr2w\" (UniqueName: \"kubernetes.io/projected/d767077e-2792-43d7-aedc-638ddd3adb65-kube-api-access-xdr2w\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.803862 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-scripts\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.806486 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-public-tls-certs\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.806648 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-internal-tls-certs\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.807100 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-combined-ca-bundle\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.807848 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d767077e-2792-43d7-aedc-638ddd3adb65-config-data\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.816351 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdr2w\" (UniqueName: \"kubernetes.io/projected/d767077e-2792-43d7-aedc-638ddd3adb65-kube-api-access-xdr2w\") pod \"aodh-0\" (UID: \"d767077e-2792-43d7-aedc-638ddd3adb65\") " pod="openstack/aodh-0" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.855920 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bb398a0-a1a6-4f3a-98b2-5990a780a670" path="/var/lib/kubelet/pods/9bb398a0-a1a6-4f3a-98b2-5990a780a670/volumes" Dec 03 18:20:46 crc kubenswrapper[5002]: I1203 18:20:46.895633 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.460223 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.497617 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d767077e-2792-43d7-aedc-638ddd3adb65","Type":"ContainerStarted","Data":"62035463f01eddfc15a753e2aac70eb24380ceb1a7aebfc0614af62dd60b3063"} Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.632355 5002 scope.go:117] "RemoveContainer" containerID="a733986b48e0f939b974aa88e70f14245f3a7035998271f6ca1951145d6c8311" Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.675151 5002 scope.go:117] "RemoveContainer" containerID="a09fb65f91f4c09e8e31acf07cdb0e1abc946a43d4e0333ff62b7fafafebfac1" Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.737689 5002 scope.go:117] "RemoveContainer" containerID="f2de2ff71ab389e2fadb93c19fc3aab7ea754fdd5e26d68c73eb27de17efaa64" Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.786172 5002 scope.go:117] "RemoveContainer" containerID="06bfef7697c7234b0d9ce7f48dc3f1ce9fc6347a71a541a8d12b48ff25adc6f4" Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.900909 5002 scope.go:117] "RemoveContainer" containerID="b79d3c183a49f39dbfad85ac5c6b92ea38d7cdd82b8eea1c33ad8da79b1d7e67" Dec 03 18:20:47 crc kubenswrapper[5002]: I1203 18:20:47.988005 5002 scope.go:117] "RemoveContainer" containerID="f57dad9a8086d5be1fb1dd7f7800833c8c0ffacb449ab818e5a354d1c70b8e7c" Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.042948 5002 scope.go:117] "RemoveContainer" containerID="00337c103c82e951e95fe478d5390ae774dd9ccba7bc4fc35e48fc3f4ae57596" Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.074912 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.075182 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="9f647f42-8d07-4dbf-8358-d8e20957b795" containerName="kube-state-metrics" containerID="cri-o://7892f8ab4863464d0a7f8520c818cbd5d685bc1b39ef77da923960d5701c3315" gracePeriod=30 Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.514636 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d767077e-2792-43d7-aedc-638ddd3adb65","Type":"ContainerStarted","Data":"39ff18fe036e598fd75a843368d94d3533168ff74594efa72648da445f1aa57a"} Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.516496 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f647f42-8d07-4dbf-8358-d8e20957b795" containerID="7892f8ab4863464d0a7f8520c818cbd5d685bc1b39ef77da923960d5701c3315" exitCode=2 Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.516538 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9f647f42-8d07-4dbf-8358-d8e20957b795","Type":"ContainerDied","Data":"7892f8ab4863464d0a7f8520c818cbd5d685bc1b39ef77da923960d5701c3315"} Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.623601 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.738885 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m92p2\" (UniqueName: \"kubernetes.io/projected/9f647f42-8d07-4dbf-8358-d8e20957b795-kube-api-access-m92p2\") pod \"9f647f42-8d07-4dbf-8358-d8e20957b795\" (UID: \"9f647f42-8d07-4dbf-8358-d8e20957b795\") " Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.756839 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f647f42-8d07-4dbf-8358-d8e20957b795-kube-api-access-m92p2" (OuterVolumeSpecName: "kube-api-access-m92p2") pod "9f647f42-8d07-4dbf-8358-d8e20957b795" (UID: "9f647f42-8d07-4dbf-8358-d8e20957b795"). InnerVolumeSpecName "kube-api-access-m92p2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:20:48 crc kubenswrapper[5002]: I1203 18:20:48.841369 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m92p2\" (UniqueName: \"kubernetes.io/projected/9f647f42-8d07-4dbf-8358-d8e20957b795-kube-api-access-m92p2\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.527259 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9f647f42-8d07-4dbf-8358-d8e20957b795","Type":"ContainerDied","Data":"44e6aade50d461b9ec247f75234eb08f362074d8fae2671140ba140eeb3120e9"} Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.527295 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.527612 5002 scope.go:117] "RemoveContainer" containerID="7892f8ab4863464d0a7f8520c818cbd5d685bc1b39ef77da923960d5701c3315" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.532874 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d767077e-2792-43d7-aedc-638ddd3adb65","Type":"ContainerStarted","Data":"756d0430e3c51bcccc80cf9ce47254c42105323840534042a81af9e21d9c6a60"} Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.659538 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.674204 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.688815 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:20:49 crc kubenswrapper[5002]: E1203 18:20:49.689275 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f647f42-8d07-4dbf-8358-d8e20957b795" containerName="kube-state-metrics" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.689287 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f647f42-8d07-4dbf-8358-d8e20957b795" containerName="kube-state-metrics" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.689510 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f647f42-8d07-4dbf-8358-d8e20957b795" containerName="kube-state-metrics" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.690253 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.694213 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.694566 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.696007 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.762961 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.763050 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.763288 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.763410 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnfhl\" (UniqueName: \"kubernetes.io/projected/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-api-access-wnfhl\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.865073 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.865182 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.865250 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.865321 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnfhl\" 
(UniqueName: \"kubernetes.io/projected/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-api-access-wnfhl\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.878382 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.883370 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.893288 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:49 crc kubenswrapper[5002]: I1203 18:20:49.920979 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnfhl\" (UniqueName: \"kubernetes.io/projected/e8804817-b0bf-4ba4-98f9-0ff63cad140f-kube-api-access-wnfhl\") pod \"kube-state-metrics-0\" (UID: \"e8804817-b0bf-4ba4-98f9-0ff63cad140f\") " pod="openstack/kube-state-metrics-0" Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.018931 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.544825 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d767077e-2792-43d7-aedc-638ddd3adb65","Type":"ContainerStarted","Data":"f08d2fb09f05fd7f2c97ac96d142e11931165b7a71898b6fa18822a23bc07857"} Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.684786 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 18:20:50 crc kubenswrapper[5002]: W1203 18:20:50.691148 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8804817_b0bf_4ba4_98f9_0ff63cad140f.slice/crio-108d4007a4d3559f74e6433d0e6b14d331c1a137991464ea545c55fb8cd44eeb WatchSource:0}: Error finding container 108d4007a4d3559f74e6433d0e6b14d331c1a137991464ea545c55fb8cd44eeb: Status 404 returned error can't find the container with id 108d4007a4d3559f74e6433d0e6b14d331c1a137991464ea545c55fb8cd44eeb Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.851901 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f647f42-8d07-4dbf-8358-d8e20957b795" path="/var/lib/kubelet/pods/9f647f42-8d07-4dbf-8358-d8e20957b795/volumes" Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.997758 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.998013 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-central-agent" containerID="cri-o://3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f" gracePeriod=30 Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.998399 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="proxy-httpd" containerID="cri-o://7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f" gracePeriod=30 Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.998446 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="sg-core" containerID="cri-o://8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76" gracePeriod=30 Dec 03 18:20:50 crc kubenswrapper[5002]: I1203 18:20:50.998477 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-notification-agent" containerID="cri-o://4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48" gracePeriod=30 Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.556884 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e8804817-b0bf-4ba4-98f9-0ff63cad140f","Type":"ContainerStarted","Data":"d9ceee2013740bde3ccff805f569db45d32cfee0288348b8d08655a7dfd7997f"} Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.558330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e8804817-b0bf-4ba4-98f9-0ff63cad140f","Type":"ContainerStarted","Data":"108d4007a4d3559f74e6433d0e6b14d331c1a137991464ea545c55fb8cd44eeb"} Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.558447 5002 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.561209 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d767077e-2792-43d7-aedc-638ddd3adb65","Type":"ContainerStarted","Data":"349b1cee7cb34324c0f770c5582822f9c7654b88c3e8ead73fe5313544aa0410"} Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.564602 5002 generic.go:334] "Generic (PLEG): container finished" podID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerID="7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f" exitCode=0 Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.564877 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerDied","Data":"7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f"} Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.564931 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerDied","Data":"8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76"} Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.564883 5002 generic.go:334] "Generic (PLEG): container finished" podID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerID="8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76" exitCode=2 Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.564957 5002 generic.go:334] "Generic (PLEG): container finished" podID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerID="3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f" exitCode=0 Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.564994 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerDied","Data":"3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f"} Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.590511 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.221783507 podStartE2EDuration="2.590495411s" podCreationTimestamp="2025-12-03 18:20:49 +0000 UTC" firstStartedPulling="2025-12-03 18:20:50.69366538 +0000 UTC m=+6574.107487268" lastFinishedPulling="2025-12-03 18:20:51.062377284 +0000 UTC m=+6574.476199172" observedRunningTime="2025-12-03 18:20:51.576866863 +0000 UTC m=+6574.990688771" watchObservedRunningTime="2025-12-03 18:20:51.590495411 +0000 UTC m=+6575.004317299" Dec 03 18:20:51 crc kubenswrapper[5002]: I1203 18:20:51.600154 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.727782102 podStartE2EDuration="5.600139221s" podCreationTimestamp="2025-12-03 18:20:46 +0000 UTC" firstStartedPulling="2025-12-03 18:20:47.45412518 +0000 UTC m=+6570.867947068" lastFinishedPulling="2025-12-03 18:20:50.326482299 +0000 UTC m=+6573.740304187" observedRunningTime="2025-12-03 18:20:51.599571035 +0000 UTC m=+6575.013392923" watchObservedRunningTime="2025-12-03 18:20:51.600139221 +0000 UTC m=+6575.013961109" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.349080 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.443854 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k5sg\" (UniqueName: \"kubernetes.io/projected/95957e23-df9c-4340-a03f-f0728f0fdb10-kube-api-access-2k5sg\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.443937 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-scripts\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.443962 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-combined-ca-bundle\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.444005 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-run-httpd\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.444052 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-log-httpd\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.444110 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-config-data\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.444152 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-sg-core-conf-yaml\") pod \"95957e23-df9c-4340-a03f-f0728f0fdb10\" (UID: \"95957e23-df9c-4340-a03f-f0728f0fdb10\") " Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.445515 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.445882 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.452149 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-scripts" (OuterVolumeSpecName: "scripts") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.462864 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95957e23-df9c-4340-a03f-f0728f0fdb10-kube-api-access-2k5sg" (OuterVolumeSpecName: "kube-api-access-2k5sg") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "kube-api-access-2k5sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.481548 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.548813 5002 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.548854 5002 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.548867 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k5sg\" (UniqueName: \"kubernetes.io/projected/95957e23-df9c-4340-a03f-f0728f0fdb10-kube-api-access-2k5sg\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.548885 5002 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.548896 5002 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95957e23-df9c-4340-a03f-f0728f0fdb10-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.573890 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.596859 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-config-data" (OuterVolumeSpecName: "config-data") pod "95957e23-df9c-4340-a03f-f0728f0fdb10" (UID: "95957e23-df9c-4340-a03f-f0728f0fdb10"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.596891 5002 generic.go:334] "Generic (PLEG): container finished" podID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerID="4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48" exitCode=0 Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.596963 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerDied","Data":"4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48"} Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.597004 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.597024 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"95957e23-df9c-4340-a03f-f0728f0fdb10","Type":"ContainerDied","Data":"218e1ad655fa9432dc6d2abd51247f89fc29ca9a10e465c3aa12fc3d80df4286"} Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.597048 5002 scope.go:117] "RemoveContainer" containerID="7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.651284 5002 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.651318 5002 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95957e23-df9c-4340-a03f-f0728f0fdb10-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.722514 5002 scope.go:117] "RemoveContainer" containerID="8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.738615 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.773399 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.773952 5002 scope.go:117] "RemoveContainer" containerID="4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.794817 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.795400 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-notification-agent" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795420 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-notification-agent" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.795469 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="sg-core" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795477 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="sg-core" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.795486 5002 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-central-agent" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795495 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-central-agent" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.795524 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="proxy-httpd" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795531 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="proxy-httpd" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795794 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-notification-agent" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795815 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="sg-core" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795832 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="proxy-httpd" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.795844 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" containerName="ceilometer-central-agent" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.798234 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.803584 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.803754 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.815504 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.833469 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857341 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-scripts\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857410 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1f890031-6344-4527-b611-df1280e07069-log-httpd\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857456 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857488 5002 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1f890031-6344-4527-b611-df1280e07069-run-httpd\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857511 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857573 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n5q9\" (UniqueName: \"kubernetes.io/projected/1f890031-6344-4527-b611-df1280e07069-kube-api-access-2n5q9\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857589 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-config-data\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.857624 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.859913 5002 scope.go:117] "RemoveContainer" containerID="3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.889517 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95957e23-df9c-4340-a03f-f0728f0fdb10" path="/var/lib/kubelet/pods/95957e23-df9c-4340-a03f-f0728f0fdb10/volumes" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.927897 5002 scope.go:117] "RemoveContainer" containerID="7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.933860 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f\": container with ID starting with 7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f not found: ID does not exist" containerID="7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.933902 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f"} err="failed to get container status \"7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f\": rpc error: code = NotFound desc = could not find container \"7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f\": container with ID starting with 7cdc73af3bf8b8fe157949f3f0cd0bd677194d060aa72245e680ff32fd13855f not found: ID does not exist" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.933925 5002 scope.go:117] "RemoveContainer" 
containerID="8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.934218 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76\": container with ID starting with 8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76 not found: ID does not exist" containerID="8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.934241 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76"} err="failed to get container status \"8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76\": rpc error: code = NotFound desc = could not find container \"8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76\": container with ID starting with 8b1693829164ed733feed2b5995eb6fcf1fc83a0f0b4b7638e8a7069f6418e76 not found: ID does not exist" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.934255 5002 scope.go:117] "RemoveContainer" containerID="4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.934423 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48\": container with ID starting with 4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48 not found: ID does not exist" containerID="4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.934444 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48"} err="failed to get container status \"4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48\": rpc error: code = NotFound desc = could not find container \"4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48\": container with ID starting with 4050c8e79eb6d362b8dea2fb3773fb2f5b0ab89b4b06082c4457e02b57443c48 not found: ID does not exist" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.934458 5002 scope.go:117] "RemoveContainer" containerID="3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f" Dec 03 18:20:52 crc kubenswrapper[5002]: E1203 18:20:52.934619 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f\": container with ID starting with 3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f not found: ID does not exist" containerID="3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.934641 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f"} err="failed to get container status \"3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f\": rpc error: code = NotFound desc = could not find container \"3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f\": container with ID starting with 
3c89cfcbd7e874eb35e07628a24295a5b31bb42fc29beb83a9815c5372fea26f not found: ID does not exist" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.960110 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.960390 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n5q9\" (UniqueName: \"kubernetes.io/projected/1f890031-6344-4527-b611-df1280e07069-kube-api-access-2n5q9\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.960438 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-config-data\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.960575 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.960797 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-scripts\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.960983 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1f890031-6344-4527-b611-df1280e07069-log-httpd\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.961117 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.961232 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1f890031-6344-4527-b611-df1280e07069-run-httpd\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.963799 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1f890031-6344-4527-b611-df1280e07069-log-httpd\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.963906 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1f890031-6344-4527-b611-df1280e07069-run-httpd\") pod \"ceilometer-0\" (UID: 
\"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.967264 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.968782 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-scripts\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.970163 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-config-data\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.983310 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.986367 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f890031-6344-4527-b611-df1280e07069-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:52 crc kubenswrapper[5002]: I1203 18:20:52.986865 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n5q9\" (UniqueName: \"kubernetes.io/projected/1f890031-6344-4527-b611-df1280e07069-kube-api-access-2n5q9\") pod \"ceilometer-0\" (UID: \"1f890031-6344-4527-b611-df1280e07069\") " pod="openstack/ceilometer-0" Dec 03 18:20:53 crc kubenswrapper[5002]: I1203 18:20:53.036714 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sbg8b"] Dec 03 18:20:53 crc kubenswrapper[5002]: I1203 18:20:53.048290 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-sbg8b"] Dec 03 18:20:53 crc kubenswrapper[5002]: I1203 18:20:53.173526 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 18:20:53 crc kubenswrapper[5002]: I1203 18:20:53.756240 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 18:20:53 crc kubenswrapper[5002]: W1203 18:20:53.756976 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f890031_6344_4527_b611_df1280e07069.slice/crio-156bd23c9f3c24f0c8ee9c3df36a51194617c4c66fc0aa557db4546e376f5f44 WatchSource:0}: Error finding container 156bd23c9f3c24f0c8ee9c3df36a51194617c4c66fc0aa557db4546e376f5f44: Status 404 returned error can't find the container with id 156bd23c9f3c24f0c8ee9c3df36a51194617c4c66fc0aa557db4546e376f5f44 Dec 03 18:20:54 crc kubenswrapper[5002]: I1203 18:20:54.031627 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-xw92n"] Dec 03 18:20:54 crc kubenswrapper[5002]: I1203 18:20:54.043025 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-xw92n"] Dec 03 18:20:54 crc kubenswrapper[5002]: I1203 18:20:54.643220 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1f890031-6344-4527-b611-df1280e07069","Type":"ContainerStarted","Data":"8c954655dbf0c56e6bf4c7b5b84b169bfc61777bc32214c2ade03fb0b54ef3b4"} Dec 03 18:20:54 crc kubenswrapper[5002]: I1203 18:20:54.643585 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1f890031-6344-4527-b611-df1280e07069","Type":"ContainerStarted","Data":"156bd23c9f3c24f0c8ee9c3df36a51194617c4c66fc0aa557db4546e376f5f44"} Dec 03 18:20:54 crc kubenswrapper[5002]: I1203 18:20:54.851805 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67230eb7-7694-4dd0-9f46-e05364abfc60" path="/var/lib/kubelet/pods/67230eb7-7694-4dd0-9f46-e05364abfc60/volumes" Dec 03 18:20:54 crc kubenswrapper[5002]: I1203 18:20:54.853966 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="954c519e-193b-486a-9b88-fc780ef7877b" path="/var/lib/kubelet/pods/954c519e-193b-486a-9b88-fc780ef7877b/volumes" Dec 03 18:20:55 crc kubenswrapper[5002]: I1203 18:20:55.663895 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1f890031-6344-4527-b611-df1280e07069","Type":"ContainerStarted","Data":"539915420c663b671c74b6c4b782f5ae519ffe6975f9cf267e58f71023e0232a"} Dec 03 18:20:56 crc kubenswrapper[5002]: I1203 18:20:56.684330 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1f890031-6344-4527-b611-df1280e07069","Type":"ContainerStarted","Data":"db34b9646b33c994b684aeca2d7dd9ef7207fa071b87ef20ea41258b2f087991"} Dec 03 18:20:57 crc kubenswrapper[5002]: I1203 18:20:57.698352 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1f890031-6344-4527-b611-df1280e07069","Type":"ContainerStarted","Data":"be622cb25e9ac8e566fd3c520a5ce721786f2067ed47c7ee31fcb58f7fdc5d4d"} Dec 03 18:20:57 crc kubenswrapper[5002]: I1203 18:20:57.698877 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 18:20:57 crc kubenswrapper[5002]: I1203 18:20:57.737518 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.510490913 podStartE2EDuration="5.737499017s" podCreationTimestamp="2025-12-03 18:20:52 +0000 UTC" firstStartedPulling="2025-12-03 
18:20:53.761105625 +0000 UTC m=+6577.174927513" lastFinishedPulling="2025-12-03 18:20:56.988113729 +0000 UTC m=+6580.401935617" observedRunningTime="2025-12-03 18:20:57.729276815 +0000 UTC m=+6581.143098703" watchObservedRunningTime="2025-12-03 18:20:57.737499017 +0000 UTC m=+6581.151320905" Dec 03 18:21:00 crc kubenswrapper[5002]: I1203 18:21:00.031542 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 18:21:11 crc kubenswrapper[5002]: I1203 18:21:11.050961 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-v9xv5"] Dec 03 18:21:11 crc kubenswrapper[5002]: I1203 18:21:11.066895 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-v9xv5"] Dec 03 18:21:12 crc kubenswrapper[5002]: I1203 18:21:12.862863 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cbade77-df8d-48c2-883c-deeec7b8f6ea" path="/var/lib/kubelet/pods/5cbade77-df8d-48c2-883c-deeec7b8f6ea/volumes" Dec 03 18:21:23 crc kubenswrapper[5002]: I1203 18:21:23.185284 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.169181 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nksmz"] Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.171846 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.216364 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nksmz"] Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.373789 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-catalog-content\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.373949 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4sz4\" (UniqueName: \"kubernetes.io/projected/8dca089f-c4fc-43a1-a791-87fc9425cfba-kube-api-access-s4sz4\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.373998 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-utilities\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.476045 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4sz4\" (UniqueName: \"kubernetes.io/projected/8dca089f-c4fc-43a1-a791-87fc9425cfba-kube-api-access-s4sz4\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.476100 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-utilities\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.476254 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-catalog-content\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.476844 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-utilities\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.476868 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-catalog-content\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.510865 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4sz4\" (UniqueName: \"kubernetes.io/projected/8dca089f-c4fc-43a1-a791-87fc9425cfba-kube-api-access-s4sz4\") pod \"certified-operators-nksmz\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:42 crc kubenswrapper[5002]: I1203 18:21:42.806429 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:43 crc kubenswrapper[5002]: I1203 18:21:43.345829 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nksmz"] Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.265333 5002 generic.go:334] "Generic (PLEG): container finished" podID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerID="0c2c2239f20395dffd49328dbb71028c59be6fbbfe4b511bec90e389a147e8d8" exitCode=0 Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.265416 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerDied","Data":"0c2c2239f20395dffd49328dbb71028c59be6fbbfe4b511bec90e389a147e8d8"} Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.265712 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerStarted","Data":"e5fdd92b8ddb7251972d1f74a6aca8d6c42d51e5423e308fcb2a54d1250ad7b3"} Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.573959 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t4hh9"] Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.578177 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.583995 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4hh9"] Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.735733 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-catalog-content\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.735805 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9kjt\" (UniqueName: \"kubernetes.io/projected/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-kube-api-access-t9kjt\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.736065 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-utilities\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.839048 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-catalog-content\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.839506 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9kjt\" (UniqueName: \"kubernetes.io/projected/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-kube-api-access-t9kjt\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.839870 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-utilities\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.839576 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-catalog-content\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.841335 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-utilities\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.865341 5002 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-t9kjt\" (UniqueName: \"kubernetes.io/projected/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-kube-api-access-t9kjt\") pod \"redhat-marketplace-t4hh9\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:44 crc kubenswrapper[5002]: I1203 18:21:44.924450 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:45 crc kubenswrapper[5002]: I1203 18:21:45.276945 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerStarted","Data":"563e9993989dbaa82a4c876a8a3972a1a9021043dd3553ec2b210a402d2878e2"} Dec 03 18:21:45 crc kubenswrapper[5002]: W1203 18:21:45.476961 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82585ffd_99ac_4fb1_8b8d_434fa380bd1f.slice/crio-e26f658ebef5463cfb7628bc9dd77f860ed492cd23c3768bfb203ad00dc17b43 WatchSource:0}: Error finding container e26f658ebef5463cfb7628bc9dd77f860ed492cd23c3768bfb203ad00dc17b43: Status 404 returned error can't find the container with id e26f658ebef5463cfb7628bc9dd77f860ed492cd23c3768bfb203ad00dc17b43 Dec 03 18:21:45 crc kubenswrapper[5002]: I1203 18:21:45.487971 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4hh9"] Dec 03 18:21:46 crc kubenswrapper[5002]: I1203 18:21:46.286181 5002 generic.go:334] "Generic (PLEG): container finished" podID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerID="9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a" exitCode=0 Dec 03 18:21:46 crc kubenswrapper[5002]: I1203 18:21:46.286367 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerDied","Data":"9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a"} Dec 03 18:21:46 crc kubenswrapper[5002]: I1203 18:21:46.286526 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerStarted","Data":"e26f658ebef5463cfb7628bc9dd77f860ed492cd23c3768bfb203ad00dc17b43"} Dec 03 18:21:46 crc kubenswrapper[5002]: I1203 18:21:46.289662 5002 generic.go:334] "Generic (PLEG): container finished" podID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerID="563e9993989dbaa82a4c876a8a3972a1a9021043dd3553ec2b210a402d2878e2" exitCode=0 Dec 03 18:21:46 crc kubenswrapper[5002]: I1203 18:21:46.289696 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerDied","Data":"563e9993989dbaa82a4c876a8a3972a1a9021043dd3553ec2b210a402d2878e2"} Dec 03 18:21:47 crc kubenswrapper[5002]: I1203 18:21:47.300347 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerStarted","Data":"6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f"} Dec 03 18:21:47 crc kubenswrapper[5002]: I1203 18:21:47.304518 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" 
event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerStarted","Data":"dccaebd819ae556c81dab1c81620ab2c3e68be9eac6f64fb0d2b38e584fcf42d"} Dec 03 18:21:47 crc kubenswrapper[5002]: I1203 18:21:47.342586 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nksmz" podStartSLOduration=2.879616236 podStartE2EDuration="5.342568546s" podCreationTimestamp="2025-12-03 18:21:42 +0000 UTC" firstStartedPulling="2025-12-03 18:21:44.268650376 +0000 UTC m=+6627.682472274" lastFinishedPulling="2025-12-03 18:21:46.731602686 +0000 UTC m=+6630.145424584" observedRunningTime="2025-12-03 18:21:47.337321254 +0000 UTC m=+6630.751143132" watchObservedRunningTime="2025-12-03 18:21:47.342568546 +0000 UTC m=+6630.756390434" Dec 03 18:21:48 crc kubenswrapper[5002]: I1203 18:21:48.357683 5002 scope.go:117] "RemoveContainer" containerID="37069fe5a03d001c1bbc4d0afec937eaaea8661e9b1ef1a2a1d7af1c7aef6606" Dec 03 18:21:48 crc kubenswrapper[5002]: I1203 18:21:48.466317 5002 scope.go:117] "RemoveContainer" containerID="90cc013c7795196536344e68fee35635bd5857d6ea0605b1a6e5adf6022eaf41" Dec 03 18:21:48 crc kubenswrapper[5002]: I1203 18:21:48.546110 5002 scope.go:117] "RemoveContainer" containerID="d007cc7fa615dddf284df24864c0e4f352d7a2864995f64799a61502b1eb6ac7" Dec 03 18:21:49 crc kubenswrapper[5002]: I1203 18:21:49.323851 5002 generic.go:334] "Generic (PLEG): container finished" podID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerID="6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f" exitCode=0 Dec 03 18:21:49 crc kubenswrapper[5002]: I1203 18:21:49.323905 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerDied","Data":"6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f"} Dec 03 18:21:50 crc kubenswrapper[5002]: I1203 18:21:50.335776 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerStarted","Data":"c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8"} Dec 03 18:21:50 crc kubenswrapper[5002]: I1203 18:21:50.363904 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t4hh9" podStartSLOduration=2.915747616 podStartE2EDuration="6.363881807s" podCreationTimestamp="2025-12-03 18:21:44 +0000 UTC" firstStartedPulling="2025-12-03 18:21:46.2885056 +0000 UTC m=+6629.702327488" lastFinishedPulling="2025-12-03 18:21:49.736639791 +0000 UTC m=+6633.150461679" observedRunningTime="2025-12-03 18:21:50.353545289 +0000 UTC m=+6633.767367187" watchObservedRunningTime="2025-12-03 18:21:50.363881807 +0000 UTC m=+6633.777703695" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.165309 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pjrqh/must-gather-vsg9p"] Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.173231 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.176492 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pjrqh"/"default-dockercfg-mkdx6" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.180010 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pjrqh"/"openshift-service-ca.crt" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.180222 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pjrqh"/"kube-root-ca.crt" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.180433 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5779a321-8268-424b-bef2-08e0dd158ebc-must-gather-output\") pod \"must-gather-vsg9p\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") " pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.180515 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwhxn\" (UniqueName: \"kubernetes.io/projected/5779a321-8268-424b-bef2-08e0dd158ebc-kube-api-access-mwhxn\") pod \"must-gather-vsg9p\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") " pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.182276 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pjrqh/must-gather-vsg9p"] Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.282516 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5779a321-8268-424b-bef2-08e0dd158ebc-must-gather-output\") pod \"must-gather-vsg9p\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") " pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.282607 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwhxn\" (UniqueName: \"kubernetes.io/projected/5779a321-8268-424b-bef2-08e0dd158ebc-kube-api-access-mwhxn\") pod \"must-gather-vsg9p\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") " pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.283282 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5779a321-8268-424b-bef2-08e0dd158ebc-must-gather-output\") pod \"must-gather-vsg9p\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") " pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.305213 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwhxn\" (UniqueName: \"kubernetes.io/projected/5779a321-8268-424b-bef2-08e0dd158ebc-kube-api-access-mwhxn\") pod \"must-gather-vsg9p\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") " pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:51 crc kubenswrapper[5002]: I1203 18:21:51.500381 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" Dec 03 18:21:52 crc kubenswrapper[5002]: I1203 18:21:52.039005 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pjrqh/must-gather-vsg9p"] Dec 03 18:21:52 crc kubenswrapper[5002]: I1203 18:21:52.365685 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" event={"ID":"5779a321-8268-424b-bef2-08e0dd158ebc","Type":"ContainerStarted","Data":"d52fda214035a4f4f4ec13e140ea33aecd4377a4a5e6ff565efd071e130534fd"} Dec 03 18:21:52 crc kubenswrapper[5002]: I1203 18:21:52.811270 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:52 crc kubenswrapper[5002]: I1203 18:21:52.811317 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:52 crc kubenswrapper[5002]: I1203 18:21:52.871057 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:53 crc kubenswrapper[5002]: I1203 18:21:53.533370 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:53 crc kubenswrapper[5002]: I1203 18:21:53.957128 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nksmz"] Dec 03 18:21:54 crc kubenswrapper[5002]: I1203 18:21:54.924702 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:54 crc kubenswrapper[5002]: I1203 18:21:54.925343 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:54 crc kubenswrapper[5002]: I1203 18:21:54.981561 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:55 crc kubenswrapper[5002]: I1203 18:21:55.403074 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nksmz" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="registry-server" containerID="cri-o://dccaebd819ae556c81dab1c81620ab2c3e68be9eac6f64fb0d2b38e584fcf42d" gracePeriod=2 Dec 03 18:21:55 crc kubenswrapper[5002]: I1203 18:21:55.458490 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:56 crc kubenswrapper[5002]: I1203 18:21:56.353981 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4hh9"] Dec 03 18:21:56 crc kubenswrapper[5002]: I1203 18:21:56.415484 5002 generic.go:334] "Generic (PLEG): container finished" podID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerID="dccaebd819ae556c81dab1c81620ab2c3e68be9eac6f64fb0d2b38e584fcf42d" exitCode=0 Dec 03 18:21:56 crc kubenswrapper[5002]: I1203 18:21:56.415574 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerDied","Data":"dccaebd819ae556c81dab1c81620ab2c3e68be9eac6f64fb0d2b38e584fcf42d"} Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.317623 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.427496 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" event={"ID":"5779a321-8268-424b-bef2-08e0dd158ebc","Type":"ContainerStarted","Data":"656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"} Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.428426 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-catalog-content\") pod \"8dca089f-c4fc-43a1-a791-87fc9425cfba\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.428822 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4sz4\" (UniqueName: \"kubernetes.io/projected/8dca089f-c4fc-43a1-a791-87fc9425cfba-kube-api-access-s4sz4\") pod \"8dca089f-c4fc-43a1-a791-87fc9425cfba\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.428882 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-utilities\") pod \"8dca089f-c4fc-43a1-a791-87fc9425cfba\" (UID: \"8dca089f-c4fc-43a1-a791-87fc9425cfba\") " Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.430070 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-utilities" (OuterVolumeSpecName: "utilities") pod "8dca089f-c4fc-43a1-a791-87fc9425cfba" (UID: "8dca089f-c4fc-43a1-a791-87fc9425cfba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.434102 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t4hh9" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="registry-server" containerID="cri-o://c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8" gracePeriod=2 Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.434451 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nksmz" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.434977 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nksmz" event={"ID":"8dca089f-c4fc-43a1-a791-87fc9425cfba","Type":"ContainerDied","Data":"e5fdd92b8ddb7251972d1f74a6aca8d6c42d51e5423e308fcb2a54d1250ad7b3"} Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.435014 5002 scope.go:117] "RemoveContainer" containerID="dccaebd819ae556c81dab1c81620ab2c3e68be9eac6f64fb0d2b38e584fcf42d" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.436167 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dca089f-c4fc-43a1-a791-87fc9425cfba-kube-api-access-s4sz4" (OuterVolumeSpecName: "kube-api-access-s4sz4") pod "8dca089f-c4fc-43a1-a791-87fc9425cfba" (UID: "8dca089f-c4fc-43a1-a791-87fc9425cfba"). InnerVolumeSpecName "kube-api-access-s4sz4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.486948 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8dca089f-c4fc-43a1-a791-87fc9425cfba" (UID: "8dca089f-c4fc-43a1-a791-87fc9425cfba"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.533443 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4sz4\" (UniqueName: \"kubernetes.io/projected/8dca089f-c4fc-43a1-a791-87fc9425cfba-kube-api-access-s4sz4\") on node \"crc\" DevicePath \"\"" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.533649 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.533663 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dca089f-c4fc-43a1-a791-87fc9425cfba-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.597382 5002 scope.go:117] "RemoveContainer" containerID="563e9993989dbaa82a4c876a8a3972a1a9021043dd3553ec2b210a402d2878e2" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.633593 5002 scope.go:117] "RemoveContainer" containerID="0c2c2239f20395dffd49328dbb71028c59be6fbbfe4b511bec90e389a147e8d8" Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.815434 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nksmz"] Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.829914 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nksmz"] Dec 03 18:21:57 crc kubenswrapper[5002]: I1203 18:21:57.968296 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.052472 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-catalog-content\") pod \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.052602 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-utilities\") pod \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.052657 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9kjt\" (UniqueName: \"kubernetes.io/projected/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-kube-api-access-t9kjt\") pod \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\" (UID: \"82585ffd-99ac-4fb1-8b8d-434fa380bd1f\") " Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.053848 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-utilities" (OuterVolumeSpecName: "utilities") pod "82585ffd-99ac-4fb1-8b8d-434fa380bd1f" (UID: "82585ffd-99ac-4fb1-8b8d-434fa380bd1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.060064 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-kube-api-access-t9kjt" (OuterVolumeSpecName: "kube-api-access-t9kjt") pod "82585ffd-99ac-4fb1-8b8d-434fa380bd1f" (UID: "82585ffd-99ac-4fb1-8b8d-434fa380bd1f"). InnerVolumeSpecName "kube-api-access-t9kjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.069079 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82585ffd-99ac-4fb1-8b8d-434fa380bd1f" (UID: "82585ffd-99ac-4fb1-8b8d-434fa380bd1f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.154659 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9kjt\" (UniqueName: \"kubernetes.io/projected/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-kube-api-access-t9kjt\") on node \"crc\" DevicePath \"\"" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.154692 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.154705 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82585ffd-99ac-4fb1-8b8d-434fa380bd1f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.444657 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" event={"ID":"5779a321-8268-424b-bef2-08e0dd158ebc","Type":"ContainerStarted","Data":"3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c"} Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.448059 5002 generic.go:334] "Generic (PLEG): container finished" podID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerID="c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8" exitCode=0 Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.448118 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerDied","Data":"c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8"} Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.448145 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4hh9" event={"ID":"82585ffd-99ac-4fb1-8b8d-434fa380bd1f","Type":"ContainerDied","Data":"e26f658ebef5463cfb7628bc9dd77f860ed492cd23c3768bfb203ad00dc17b43"} Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.448163 5002 scope.go:117] "RemoveContainer" containerID="c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.448282 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4hh9" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.464989 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" podStartSLOduration=2.590114711 podStartE2EDuration="7.464972135s" podCreationTimestamp="2025-12-03 18:21:51 +0000 UTC" firstStartedPulling="2025-12-03 18:21:52.048315486 +0000 UTC m=+6635.462137384" lastFinishedPulling="2025-12-03 18:21:56.92317293 +0000 UTC m=+6640.336994808" observedRunningTime="2025-12-03 18:21:58.462048627 +0000 UTC m=+6641.875870515" watchObservedRunningTime="2025-12-03 18:21:58.464972135 +0000 UTC m=+6641.878794023" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.482175 5002 scope.go:117] "RemoveContainer" containerID="6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.543793 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4hh9"] Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.547705 5002 scope.go:117] "RemoveContainer" containerID="9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.565129 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4hh9"] Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.592555 5002 scope.go:117] "RemoveContainer" containerID="c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8" Dec 03 18:21:58 crc kubenswrapper[5002]: E1203 18:21:58.595905 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8\": container with ID starting with c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8 not found: ID does not exist" containerID="c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.595947 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8"} err="failed to get container status \"c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8\": rpc error: code = NotFound desc = could not find container \"c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8\": container with ID starting with c2f0b9b17745af6afe780d20e4778e589634d8318acc3a34eb88827c48b585f8 not found: ID does not exist" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.595969 5002 scope.go:117] "RemoveContainer" containerID="6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f" Dec 03 18:21:58 crc kubenswrapper[5002]: E1203 18:21:58.597112 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f\": container with ID starting with 6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f not found: ID does not exist" containerID="6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.597148 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f"} err="failed to get container 
status \"6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f\": rpc error: code = NotFound desc = could not find container \"6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f\": container with ID starting with 6751a1411ded0ee96ad4369894d04ea8c6ea87dde9878ef80901d8530568698f not found: ID does not exist" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.597174 5002 scope.go:117] "RemoveContainer" containerID="9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a" Dec 03 18:21:58 crc kubenswrapper[5002]: E1203 18:21:58.597673 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a\": container with ID starting with 9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a not found: ID does not exist" containerID="9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.597713 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a"} err="failed to get container status \"9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a\": rpc error: code = NotFound desc = could not find container \"9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a\": container with ID starting with 9a18e426d7fdfcfc12f5c08cfed6cc5c9c9010b3fac19c492b2dbe00e7c2f53a not found: ID does not exist" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.853692 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" path="/var/lib/kubelet/pods/82585ffd-99ac-4fb1-8b8d-434fa380bd1f/volumes" Dec 03 18:21:58 crc kubenswrapper[5002]: I1203 18:21:58.854617 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" path="/var/lib/kubelet/pods/8dca089f-c4fc-43a1-a791-87fc9425cfba/volumes" Dec 03 18:22:00 crc kubenswrapper[5002]: E1203 18:22:00.857653 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.155:35514->38.102.83.155:42635: write tcp 38.102.83.155:35514->38.102.83.155:42635: write: broken pipe Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.015456 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pjrqh/crc-debug-r9ngf"] Dec 03 18:22:02 crc kubenswrapper[5002]: E1203 18:22:02.016160 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="registry-server" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016174 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="registry-server" Dec 03 18:22:02 crc kubenswrapper[5002]: E1203 18:22:02.016187 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="extract-utilities" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016193 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="extract-utilities" Dec 03 18:22:02 crc kubenswrapper[5002]: E1203 18:22:02.016210 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="extract-utilities" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 
18:22:02.016216 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="extract-utilities" Dec 03 18:22:02 crc kubenswrapper[5002]: E1203 18:22:02.016230 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="extract-content" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016236 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="extract-content" Dec 03 18:22:02 crc kubenswrapper[5002]: E1203 18:22:02.016245 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="registry-server" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016251 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="registry-server" Dec 03 18:22:02 crc kubenswrapper[5002]: E1203 18:22:02.016271 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="extract-content" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016277 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="extract-content" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016455 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="82585ffd-99ac-4fb1-8b8d-434fa380bd1f" containerName="registry-server" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.016480 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dca089f-c4fc-43a1-a791-87fc9425cfba" containerName="registry-server" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.017175 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.049597 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6zbq\" (UniqueName: \"kubernetes.io/projected/62a8e34e-ac33-474b-9598-50ec8d394373-kube-api-access-z6zbq\") pod \"crc-debug-r9ngf\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.049658 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/62a8e34e-ac33-474b-9598-50ec8d394373-host\") pod \"crc-debug-r9ngf\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.152010 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6zbq\" (UniqueName: \"kubernetes.io/projected/62a8e34e-ac33-474b-9598-50ec8d394373-kube-api-access-z6zbq\") pod \"crc-debug-r9ngf\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.152055 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/62a8e34e-ac33-474b-9598-50ec8d394373-host\") pod \"crc-debug-r9ngf\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.152318 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/62a8e34e-ac33-474b-9598-50ec8d394373-host\") pod \"crc-debug-r9ngf\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.170712 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6zbq\" (UniqueName: \"kubernetes.io/projected/62a8e34e-ac33-474b-9598-50ec8d394373-kube-api-access-z6zbq\") pod \"crc-debug-r9ngf\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.345673 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:02 crc kubenswrapper[5002]: W1203 18:22:02.389547 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62a8e34e_ac33_474b_9598_50ec8d394373.slice/crio-3297957e9a9a60a690fe2ce174b62037689eba38b4adb89e1be5f86437dc6ca4 WatchSource:0}: Error finding container 3297957e9a9a60a690fe2ce174b62037689eba38b4adb89e1be5f86437dc6ca4: Status 404 returned error can't find the container with id 3297957e9a9a60a690fe2ce174b62037689eba38b4adb89e1be5f86437dc6ca4 Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.392267 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 18:22:02 crc kubenswrapper[5002]: I1203 18:22:02.489683 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" event={"ID":"62a8e34e-ac33-474b-9598-50ec8d394373","Type":"ContainerStarted","Data":"3297957e9a9a60a690fe2ce174b62037689eba38b4adb89e1be5f86437dc6ca4"} Dec 03 18:22:14 crc kubenswrapper[5002]: I1203 18:22:14.603596 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" event={"ID":"62a8e34e-ac33-474b-9598-50ec8d394373","Type":"ContainerStarted","Data":"a9687978fcadcf007983ae67e7915a89e0d2295b48c12657a459dfc9e3aadef8"} Dec 03 18:22:14 crc kubenswrapper[5002]: I1203 18:22:14.631917 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" podStartSLOduration=1.057076184 podStartE2EDuration="12.631896822s" podCreationTimestamp="2025-12-03 18:22:02 +0000 UTC" firstStartedPulling="2025-12-03 18:22:02.392052689 +0000 UTC m=+6645.805874577" lastFinishedPulling="2025-12-03 18:22:13.966873287 +0000 UTC m=+6657.380695215" observedRunningTime="2025-12-03 18:22:14.620319141 +0000 UTC m=+6658.034141039" watchObservedRunningTime="2025-12-03 18:22:14.631896822 +0000 UTC m=+6658.045718730" Dec 03 18:22:20 crc kubenswrapper[5002]: I1203 18:22:20.916959 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:22:20 crc kubenswrapper[5002]: I1203 18:22:20.917459 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:22:29 crc kubenswrapper[5002]: I1203 18:22:29.745807 5002 generic.go:334] "Generic (PLEG): container finished" podID="62a8e34e-ac33-474b-9598-50ec8d394373" containerID="a9687978fcadcf007983ae67e7915a89e0d2295b48c12657a459dfc9e3aadef8" exitCode=0 Dec 03 18:22:29 crc kubenswrapper[5002]: I1203 18:22:29.745910 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" event={"ID":"62a8e34e-ac33-474b-9598-50ec8d394373","Type":"ContainerDied","Data":"a9687978fcadcf007983ae67e7915a89e0d2295b48c12657a459dfc9e3aadef8"} Dec 03 18:22:30 crc kubenswrapper[5002]: I1203 18:22:30.885653 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:30 crc kubenswrapper[5002]: I1203 18:22:30.958108 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pjrqh/crc-debug-r9ngf"] Dec 03 18:22:30 crc kubenswrapper[5002]: I1203 18:22:30.976812 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pjrqh/crc-debug-r9ngf"] Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.074719 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/62a8e34e-ac33-474b-9598-50ec8d394373-host\") pod \"62a8e34e-ac33-474b-9598-50ec8d394373\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.074843 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6zbq\" (UniqueName: \"kubernetes.io/projected/62a8e34e-ac33-474b-9598-50ec8d394373-kube-api-access-z6zbq\") pod \"62a8e34e-ac33-474b-9598-50ec8d394373\" (UID: \"62a8e34e-ac33-474b-9598-50ec8d394373\") " Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.075110 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/62a8e34e-ac33-474b-9598-50ec8d394373-host" (OuterVolumeSpecName: "host") pod "62a8e34e-ac33-474b-9598-50ec8d394373" (UID: "62a8e34e-ac33-474b-9598-50ec8d394373"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.075617 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/62a8e34e-ac33-474b-9598-50ec8d394373-host\") on node \"crc\" DevicePath \"\"" Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.089576 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62a8e34e-ac33-474b-9598-50ec8d394373-kube-api-access-z6zbq" (OuterVolumeSpecName: "kube-api-access-z6zbq") pod "62a8e34e-ac33-474b-9598-50ec8d394373" (UID: "62a8e34e-ac33-474b-9598-50ec8d394373"). InnerVolumeSpecName "kube-api-access-z6zbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.177545 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6zbq\" (UniqueName: \"kubernetes.io/projected/62a8e34e-ac33-474b-9598-50ec8d394373-kube-api-access-z6zbq\") on node \"crc\" DevicePath \"\"" Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.767244 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3297957e9a9a60a690fe2ce174b62037689eba38b4adb89e1be5f86437dc6ca4" Dec 03 18:22:31 crc kubenswrapper[5002]: I1203 18:22:31.767377 5002 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-r9ngf" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.131780 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pjrqh/crc-debug-f9gzs"] Dec 03 18:22:32 crc kubenswrapper[5002]: E1203 18:22:32.132625 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62a8e34e-ac33-474b-9598-50ec8d394373" containerName="container-00" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.132646 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="62a8e34e-ac33-474b-9598-50ec8d394373" containerName="container-00" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.133048 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="62a8e34e-ac33-474b-9598-50ec8d394373" containerName="container-00" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.134041 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.298293 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmlmq\" (UniqueName: \"kubernetes.io/projected/9f2d987d-d6ea-475b-bbab-044267f7f264-kube-api-access-jmlmq\") pod \"crc-debug-f9gzs\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.298335 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f2d987d-d6ea-475b-bbab-044267f7f264-host\") pod \"crc-debug-f9gzs\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.401320 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmlmq\" (UniqueName: \"kubernetes.io/projected/9f2d987d-d6ea-475b-bbab-044267f7f264-kube-api-access-jmlmq\") pod \"crc-debug-f9gzs\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.401400 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f2d987d-d6ea-475b-bbab-044267f7f264-host\") pod \"crc-debug-f9gzs\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.401871 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f2d987d-d6ea-475b-bbab-044267f7f264-host\") pod \"crc-debug-f9gzs\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.428672 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmlmq\" (UniqueName: \"kubernetes.io/projected/9f2d987d-d6ea-475b-bbab-044267f7f264-kube-api-access-jmlmq\") pod \"crc-debug-f9gzs\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.459342 5002 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.779150 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" event={"ID":"9f2d987d-d6ea-475b-bbab-044267f7f264","Type":"ContainerStarted","Data":"cfdd4a338ac86509bcacbc52177caf30a952b85ab698aea90805f11134f27629"} Dec 03 18:22:32 crc kubenswrapper[5002]: I1203 18:22:32.851686 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62a8e34e-ac33-474b-9598-50ec8d394373" path="/var/lib/kubelet/pods/62a8e34e-ac33-474b-9598-50ec8d394373/volumes" Dec 03 18:22:33 crc kubenswrapper[5002]: I1203 18:22:33.788560 5002 generic.go:334] "Generic (PLEG): container finished" podID="9f2d987d-d6ea-475b-bbab-044267f7f264" containerID="2d6e741e98039ec2ece22ba705a00b4a4918b5f820ead9d3fbeea790cf4069b4" exitCode=1 Dec 03 18:22:33 crc kubenswrapper[5002]: I1203 18:22:33.788670 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" event={"ID":"9f2d987d-d6ea-475b-bbab-044267f7f264","Type":"ContainerDied","Data":"2d6e741e98039ec2ece22ba705a00b4a4918b5f820ead9d3fbeea790cf4069b4"} Dec 03 18:22:33 crc kubenswrapper[5002]: I1203 18:22:33.825896 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pjrqh/crc-debug-f9gzs"] Dec 03 18:22:33 crc kubenswrapper[5002]: I1203 18:22:33.833108 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pjrqh/crc-debug-f9gzs"] Dec 03 18:22:34 crc kubenswrapper[5002]: I1203 18:22:34.912044 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.060242 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f2d987d-d6ea-475b-bbab-044267f7f264-host\") pod \"9f2d987d-d6ea-475b-bbab-044267f7f264\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.060397 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f2d987d-d6ea-475b-bbab-044267f7f264-host" (OuterVolumeSpecName: "host") pod "9f2d987d-d6ea-475b-bbab-044267f7f264" (UID: "9f2d987d-d6ea-475b-bbab-044267f7f264"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.060430 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmlmq\" (UniqueName: \"kubernetes.io/projected/9f2d987d-d6ea-475b-bbab-044267f7f264-kube-api-access-jmlmq\") pod \"9f2d987d-d6ea-475b-bbab-044267f7f264\" (UID: \"9f2d987d-d6ea-475b-bbab-044267f7f264\") " Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.061013 5002 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f2d987d-d6ea-475b-bbab-044267f7f264-host\") on node \"crc\" DevicePath \"\"" Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.069212 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f2d987d-d6ea-475b-bbab-044267f7f264-kube-api-access-jmlmq" (OuterVolumeSpecName: "kube-api-access-jmlmq") pod "9f2d987d-d6ea-475b-bbab-044267f7f264" (UID: "9f2d987d-d6ea-475b-bbab-044267f7f264"). InnerVolumeSpecName "kube-api-access-jmlmq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.162802 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmlmq\" (UniqueName: \"kubernetes.io/projected/9f2d987d-d6ea-475b-bbab-044267f7f264-kube-api-access-jmlmq\") on node \"crc\" DevicePath \"\"" Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.809622 5002 scope.go:117] "RemoveContainer" containerID="2d6e741e98039ec2ece22ba705a00b4a4918b5f820ead9d3fbeea790cf4069b4" Dec 03 18:22:35 crc kubenswrapper[5002]: I1203 18:22:35.809692 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pjrqh/crc-debug-f9gzs" Dec 03 18:22:36 crc kubenswrapper[5002]: I1203 18:22:36.851256 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f2d987d-d6ea-475b-bbab-044267f7f264" path="/var/lib/kubelet/pods/9f2d987d-d6ea-475b-bbab-044267f7f264/volumes" Dec 03 18:22:43 crc kubenswrapper[5002]: I1203 18:22:43.047032 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-pgbtx"] Dec 03 18:22:43 crc kubenswrapper[5002]: I1203 18:22:43.057933 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-pgbtx"] Dec 03 18:22:44 crc kubenswrapper[5002]: I1203 18:22:44.855599 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0a46695-010b-4d5a-96a7-d5e765f77829" path="/var/lib/kubelet/pods/e0a46695-010b-4d5a-96a7-d5e765f77829/volumes" Dec 03 18:22:45 crc kubenswrapper[5002]: I1203 18:22:45.030035 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-074b-account-create-update-76rsl"] Dec 03 18:22:45 crc kubenswrapper[5002]: I1203 18:22:45.038834 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-074b-account-create-update-76rsl"] Dec 03 18:22:46 crc kubenswrapper[5002]: I1203 18:22:46.853668 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76498b2c-a00b-48ef-b1b2-52ed87922f4d" path="/var/lib/kubelet/pods/76498b2c-a00b-48ef-b1b2-52ed87922f4d/volumes" Dec 03 18:22:48 crc kubenswrapper[5002]: I1203 18:22:48.699705 5002 scope.go:117] "RemoveContainer" containerID="25cac7748370be4fff083e629514384611540c94e73c0ba6fdc51aa139aaa2b5" Dec 03 18:22:48 crc kubenswrapper[5002]: I1203 18:22:48.735617 5002 scope.go:117] "RemoveContainer" containerID="d6b046fbf09afdc445bf8d517de92625c290f5cbf9e8952d5e7beaa3f1b6a3c9" Dec 03 18:22:48 crc kubenswrapper[5002]: I1203 18:22:48.793642 5002 scope.go:117] "RemoveContainer" containerID="d18307feb766083fb7706fc9f091a550449b218e8af9efd6e450b86141af2149" Dec 03 18:22:48 crc kubenswrapper[5002]: I1203 18:22:48.822495 5002 scope.go:117] "RemoveContainer" containerID="7e953a1385980131b4073c1602ef50b1f91ab923775b6ff7e191292e7a8f9359" Dec 03 18:22:50 crc kubenswrapper[5002]: I1203 18:22:50.916301 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:22:50 crc kubenswrapper[5002]: I1203 18:22:50.918565 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 03 18:22:51 crc kubenswrapper[5002]: I1203 18:22:51.028546 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-t9vk8"] Dec 03 18:22:51 crc kubenswrapper[5002]: I1203 18:22:51.039391 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-t9vk8"] Dec 03 18:22:52 crc kubenswrapper[5002]: I1203 18:22:52.034000 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-61c3-account-create-update-lpqs7"] Dec 03 18:22:52 crc kubenswrapper[5002]: I1203 18:22:52.042561 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-61c3-account-create-update-lpqs7"] Dec 03 18:22:52 crc kubenswrapper[5002]: I1203 18:22:52.889030 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1addd8f3-d5dd-49dd-a2ee-0846cb3e380d" path="/var/lib/kubelet/pods/1addd8f3-d5dd-49dd-a2ee-0846cb3e380d/volumes" Dec 03 18:22:52 crc kubenswrapper[5002]: I1203 18:22:52.890320 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9403328-ad80-45b1-96aa-3216038c0752" path="/var/lib/kubelet/pods/e9403328-ad80-45b1-96aa-3216038c0752/volumes" Dec 03 18:23:20 crc kubenswrapper[5002]: I1203 18:23:20.802485 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_17e91ab9-8baa-4b7f-b87e-99614ee85a63/init-config-reloader/0.log" Dec 03 18:23:20 crc kubenswrapper[5002]: I1203 18:23:20.917103 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:23:20 crc kubenswrapper[5002]: I1203 18:23:20.917496 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:23:20 crc kubenswrapper[5002]: I1203 18:23:20.917546 5002 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" Dec 03 18:23:20 crc kubenswrapper[5002]: I1203 18:23:20.918586 5002 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"} pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 18:23:20 crc kubenswrapper[5002]: I1203 18:23:20.918649 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" containerID="cri-o://f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" gracePeriod=600 Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.030092 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_17e91ab9-8baa-4b7f-b87e-99614ee85a63/init-config-reloader/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: E1203 18:23:21.043706 5002 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.096194 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_17e91ab9-8baa-4b7f-b87e-99614ee85a63/alertmanager/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.165497 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_17e91ab9-8baa-4b7f-b87e-99614ee85a63/config-reloader/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.302510 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d767077e-2792-43d7-aedc-638ddd3adb65/aodh-api/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.342006 5002 generic.go:334] "Generic (PLEG): container finished" podID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" exitCode=0 Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.342055 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerDied","Data":"f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"} Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.342096 5002 scope.go:117] "RemoveContainer" containerID="7911f9c0b193aa85df29cc29463813c7f08c105d2a294d7ed9ab3be4f79d1c9b" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.342918 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:23:21 crc kubenswrapper[5002]: E1203 18:23:21.343258 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.360457 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d767077e-2792-43d7-aedc-638ddd3adb65/aodh-evaluator/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.478321 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d767077e-2792-43d7-aedc-638ddd3adb65/aodh-listener/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.500439 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d767077e-2792-43d7-aedc-638ddd3adb65/aodh-notifier/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.578495 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-6a1c-account-create-update-5dx2j_74bd124f-de70-4274-86be-640d56813b9f/mariadb-account-create-update/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.702545 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_aodh-db-create-rbf5d_6acc2704-72e1-4467-932f-8cb49c2eb422/mariadb-database-create/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.823430 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-db-sync-4ndlh_be1e8c59-f401-4ea7-aada-c65cb303729a/aodh-db-sync/0.log" Dec 03 18:23:21 crc kubenswrapper[5002]: I1203 18:23:21.978389 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65468cf9b-tkjp6_ff8ecbd1-789f-490c-a96b-4b2b15d50352/barbican-api/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.020981 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65468cf9b-tkjp6_ff8ecbd1-789f-490c-a96b-4b2b15d50352/barbican-api-log/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.165060 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6f4548857b-5c9mn_f6f2f55a-2e14-44c7-baf4-8db6675cf9f3/barbican-keystone-listener/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.227798 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6f4548857b-5c9mn_f6f2f55a-2e14-44c7-baf4-8db6675cf9f3/barbican-keystone-listener-log/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.337694 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c75fb47bc-c7g9j_aad241d9-99ca-4f3c-b980-ab2f989b754f/barbican-worker/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.373380 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c75fb47bc-c7g9j_aad241d9-99ca-4f3c-b980-ab2f989b754f/barbican-worker-log/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.528454 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1f890031-6344-4527-b611-df1280e07069/ceilometer-central-agent/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.528914 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1f890031-6344-4527-b611-df1280e07069/ceilometer-notification-agent/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.538312 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1f890031-6344-4527-b611-df1280e07069/proxy-httpd/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.852411 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1f890031-6344-4527-b611-df1280e07069/sg-core/0.log" Dec 03 18:23:22 crc kubenswrapper[5002]: I1203 18:23:22.950544 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c77eef01-0f3d-4977-b89d-115010fe9491/cinder-api/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.012295 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c77eef01-0f3d-4977-b89d-115010fe9491/cinder-api-log/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.191114 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_72b3be30-cf6e-4948-8c2b-6ab8e4144f06/cinder-scheduler/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.220780 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_72b3be30-cf6e-4948-8c2b-6ab8e4144f06/probe/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.329627 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-59dfd5fdc9-m86d8_8e7ff606-dfd3-4012-ad37-d96373c36ee8/init/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.513587 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dfd5fdc9-m86d8_8e7ff606-dfd3-4012-ad37-d96373c36ee8/init/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.522438 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59dfd5fdc9-m86d8_8e7ff606-dfd3-4012-ad37-d96373c36ee8/dnsmasq-dns/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.573758 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e10e2591-b00b-450f-a558-bc66ee50c347/glance-httpd/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.739530 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e10e2591-b00b-450f-a558-bc66ee50c347/glance-log/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.797576 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_50e35719-3587-4625-9499-fbe4c047c6df/glance-httpd/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.871426 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_50e35719-3587-4625-9499-fbe4c047c6df/glance-log/0.log" Dec 03 18:23:23 crc kubenswrapper[5002]: I1203 18:23:23.994424 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-70e9-account-create-update-sp89s_a49549ac-d109-4567-b79c-a2df131387aa/mariadb-account-create-update/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.146648 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-7bf9f874bd-lg5mg_c59ef7e0-b477-4e81-abf0-ac5212a5c546/heat-api/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.361398 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-ff4576f96-gqvql_144a5d00-65c6-445a-ad71-2de503e96a0c/heat-cfnapi/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.373059 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-db-create-szwjx_e4973cbe-4313-4bdd-af74-81dade285f65/mariadb-database-create/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.525919 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-db-sync-tdkwj_94a46076-2c6c-48fe-8ec3-6b239ab5aa55/heat-db-sync/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.678413 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-855bcc4775-7nbfz_ac58b8f9-9adc-47b2-a4bf-3023e9dc67bd/heat-engine/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.830354 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-57d6578878-lnnzf_15a59bf6-beb9-43f8-b192-5f3dfe627c28/horizon-log/0.log" Dec 03 18:23:24 crc kubenswrapper[5002]: I1203 18:23:24.850058 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-57d6578878-lnnzf_15a59bf6-beb9-43f8-b192-5f3dfe627c28/horizon/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.040950 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-75d97dc8b-p2q9t_b74fe12c-0452-40fb-be2f-6d7024507a34/keystone-api/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.076500 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_kube-state-metrics-0_e8804817-b0bf-4ba4-98f9-0ff63cad140f/kube-state-metrics/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.139060 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_9e10bc3b-b369-48e1-981a-81b8f5286964/adoption/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.435060 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9f76d4b69-zqwxj_0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758/neutron-api/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.441657 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9f76d4b69-zqwxj_0bc5dfa5-8a44-4bc7-9f0a-be9a9d7ab758/neutron-httpd/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.817235 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_db27d6a5-7d81-448f-9aa1-eff2c429c52c/nova-api-log/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.825801 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_db27d6a5-7d81-448f-9aa1-eff2c429c52c/nova-api-api/0.log" Dec 03 18:23:25 crc kubenswrapper[5002]: I1203 18:23:25.894882 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_9db10677-c084-4264-bcc2-d8db607ce26b/nova-cell0-conductor-conductor/0.log" Dec 03 18:23:26 crc kubenswrapper[5002]: I1203 18:23:26.083971 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_67e2c7b8-f8f4-48c8-a384-66c38910868a/nova-cell1-conductor-conductor/0.log" Dec 03 18:23:26 crc kubenswrapper[5002]: I1203 18:23:26.239085 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a15b6552-1779-4bb6-88fb-bc77c4eed74e/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 18:23:26 crc kubenswrapper[5002]: I1203 18:23:26.478181 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_54ec48d2-46c8-4162-93a5-50355a8637a9/nova-metadata-log/0.log" Dec 03 18:23:26 crc kubenswrapper[5002]: I1203 18:23:26.840910 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_bea47cf0-de73-4742-ae2b-8c344f3e0fb5/nova-scheduler-scheduler/0.log" Dec 03 18:23:26 crc kubenswrapper[5002]: I1203 18:23:26.924602 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6547c5866b-kf9h4_dfb421b6-29a9-403a-a0b6-830781851826/init/0.log" Dec 03 18:23:26 crc kubenswrapper[5002]: I1203 18:23:26.925622 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_54ec48d2-46c8-4162-93a5-50355a8637a9/nova-metadata-metadata/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.161304 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6547c5866b-kf9h4_dfb421b6-29a9-403a-a0b6-830781851826/init/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.266711 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6547c5866b-kf9h4_dfb421b6-29a9-403a-a0b6-830781851826/octavia-api-provider-agent/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.390503 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-db-sync-sps4w_6533ba9f-610a-4fef-8c1b-8a519b0f8957/init/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.402713 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-api-6547c5866b-kf9h4_dfb421b6-29a9-403a-a0b6-830781851826/octavia-api/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.589872 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-db-sync-sps4w_6533ba9f-610a-4fef-8c1b-8a519b0f8957/init/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.629652 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-db-sync-sps4w_6533ba9f-610a-4fef-8c1b-8a519b0f8957/octavia-db-sync/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.684418 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-b4mlk_015396fc-f13a-4bc0-a8e6-6f2a4d6d97db/init/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.865590 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-b4mlk_015396fc-f13a-4bc0-a8e6-6f2a4d6d97db/init/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.940526 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-b4mlk_015396fc-f13a-4bc0-a8e6-6f2a4d6d97db/octavia-healthmanager/0.log" Dec 03 18:23:27 crc kubenswrapper[5002]: I1203 18:23:27.960586 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-v26l4_32afa6b4-a338-4399-8544-eb7ded9089ed/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.152659 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-v26l4_32afa6b4-a338-4399-8544-eb7ded9089ed/octavia-housekeeping/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.213058 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-v26l4_32afa6b4-a338-4399-8544-eb7ded9089ed/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.225220 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-56c9f55b99-sz5gd_5fe45db2-057b-47c6-8dbc-310ada8df8bd/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.494036 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-56c9f55b99-sz5gd_5fe45db2-057b-47c6-8dbc-310ada8df8bd/octavia-amphora-httpd/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.542110 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-kn7lw_c01abf35-dc93-4b56-9da5-ed1f312d9402/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.549941 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-56c9f55b99-sz5gd_5fe45db2-057b-47c6-8dbc-310ada8df8bd/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.751394 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-kn7lw_c01abf35-dc93-4b56-9da5-ed1f312d9402/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.786216 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-jpwzl_9f521da5-5148-443f-94ec-6561e981c3b4/init/0.log" Dec 03 18:23:28 crc kubenswrapper[5002]: I1203 18:23:28.844089 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-kn7lw_c01abf35-dc93-4b56-9da5-ed1f312d9402/octavia-rsyslog/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.082768 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-worker-jpwzl_9f521da5-5148-443f-94ec-6561e981c3b4/init/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.114151 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_05e199a0-a421-4d94-8454-a02e66aca0c9/mysql-bootstrap/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.232966 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-jpwzl_9f521da5-5148-443f-94ec-6561e981c3b4/octavia-worker/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.337559 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_05e199a0-a421-4d94-8454-a02e66aca0c9/mysql-bootstrap/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.343870 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_05e199a0-a421-4d94-8454-a02e66aca0c9/galera/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.446548 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_963808a8-caf6-4d66-a86b-be61b550a6a0/mysql-bootstrap/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.654034 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b67cb4b3-26bf-46b9-9f1e-e0ef8abdcd50/openstackclient/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.687120 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_963808a8-caf6-4d66-a86b-be61b550a6a0/mysql-bootstrap/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.709278 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_963808a8-caf6-4d66-a86b-be61b550a6a0/galera/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.889198 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-6hmtl_29c9f160-f344-443a-ad8b-854333e89938/openstack-network-exporter/0.log" Dec 03 18:23:29 crc kubenswrapper[5002]: I1203 18:23:29.911760 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9l9t9_0d660dfd-a3db-4786-a42e-33169069d286/ovsdb-server-init/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.250139 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9l9t9_0d660dfd-a3db-4786-a42e-33169069d286/ovsdb-server-init/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.282080 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9l9t9_0d660dfd-a3db-4786-a42e-33169069d286/ovsdb-server/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.311105 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9l9t9_0d660dfd-a3db-4786-a42e-33169069d286/ovs-vswitchd/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.559876 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-rncbs_e5d56718-3545-46f8-a9f5-a457c4255b0d/ovn-controller/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.647297 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_c69d8501-5a8d-428c-8df0-7af9b5a3d1ed/adoption/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.947196 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-northd-0_95a49eca-0a96-460d-8366-7a65e3a93c4c/ovn-northd/0.log" Dec 03 18:23:30 crc kubenswrapper[5002]: I1203 18:23:30.957394 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_95a49eca-0a96-460d-8366-7a65e3a93c4c/openstack-network-exporter/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.136694 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_73ec4b95-58eb-46f5-95af-b46497c25bf6/openstack-network-exporter/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.148639 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_73ec4b95-58eb-46f5-95af-b46497c25bf6/ovsdbserver-nb/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.291834 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_2a94b07d-4cab-49af-bc42-c8dff252abc6/openstack-network-exporter/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.420436 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_2a94b07d-4cab-49af-bc42-c8dff252abc6/ovsdbserver-nb/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.489679 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_fa833e23-a37d-488c-9f37-24d66c288ab9/openstack-network-exporter/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.573023 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_fa833e23-a37d-488c-9f37-24d66c288ab9/ovsdbserver-nb/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.657254 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f99a14c5-cbbd-4f08-aff8-3c8e921a0850/openstack-network-exporter/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.744985 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f99a14c5-cbbd-4f08-aff8-3c8e921a0850/ovsdbserver-sb/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.847298 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_58360bc1-f032-4bdd-b8c5-e2250f44e965/openstack-network-exporter/0.log" Dec 03 18:23:31 crc kubenswrapper[5002]: I1203 18:23:31.994112 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_58360bc1-f032-4bdd-b8c5-e2250f44e965/ovsdbserver-sb/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.065170 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_d21b1d77-1894-4df5-954c-35c9eb4e7780/openstack-network-exporter/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.358106 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_d21b1d77-1894-4df5-954c-35c9eb4e7780/ovsdbserver-sb/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.368885 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7b44f789c8-ntdkh_03abc328-424e-4136-8820-ebe63d719343/placement-api/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.380951 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7b44f789c8-ntdkh_03abc328-424e-4136-8820-ebe63d719343/placement-log/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.624900 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_aad2275f-1879-49ef-b51f-79efc4cc39bb/init-config-reloader/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.793400 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aad2275f-1879-49ef-b51f-79efc4cc39bb/init-config-reloader/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.814461 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aad2275f-1879-49ef-b51f-79efc4cc39bb/config-reloader/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.852345 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aad2275f-1879-49ef-b51f-79efc4cc39bb/prometheus/0.log" Dec 03 18:23:32 crc kubenswrapper[5002]: I1203 18:23:32.911669 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aad2275f-1879-49ef-b51f-79efc4cc39bb/thanos-sidecar/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.087455 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_08191975-de54-4c2b-9776-d9b9a82211c5/setup-container/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.304917 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_08191975-de54-4c2b-9776-d9b9a82211c5/rabbitmq/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.313846 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c4dade6f-e9f1-416a-a766-12f292375b21/setup-container/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.351964 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_08191975-de54-4c2b-9776-d9b9a82211c5/setup-container/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.631240 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c4dade6f-e9f1-416a-a766-12f292375b21/setup-container/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.661399 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c4dade6f-e9f1-416a-a766-12f292375b21/rabbitmq/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.729610 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-797d98844b-nvhv6_8b47040d-5203-453c-af26-fc72fed2651c/proxy-httpd/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.940354 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-797d98844b-nvhv6_8b47040d-5203-453c-af26-fc72fed2651c/proxy-server/0.log" Dec 03 18:23:33 crc kubenswrapper[5002]: I1203 18:23:33.967772 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-j5sgf_516c703e-41d6-4219-9af9-183f93fed43a/swift-ring-rebalance/0.log" Dec 03 18:23:34 crc kubenswrapper[5002]: I1203 18:23:34.844069 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:23:34 crc kubenswrapper[5002]: E1203 18:23:34.844513 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:23:40 crc kubenswrapper[5002]: I1203 18:23:40.060007 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-sps4w"] Dec 03 18:23:40 crc kubenswrapper[5002]: I1203 18:23:40.072207 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-sps4w"] Dec 03 18:23:40 crc kubenswrapper[5002]: I1203 18:23:40.858356 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6533ba9f-610a-4fef-8c1b-8a519b0f8957" path="/var/lib/kubelet/pods/6533ba9f-610a-4fef-8c1b-8a519b0f8957/volumes" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.178446 5002 scope.go:117] "RemoveContainer" containerID="8e47d3edf212aa23036ba85a5e490a98fa1d5d7d8897c2a9a912a1a200e58c69" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.216839 5002 scope.go:117] "RemoveContainer" containerID="58a94420c54bb12a185c806815cadde017e694e62e91ebb1a13ccb85b91acbb4" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.237292 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_d5f3a5b4-4fa3-478d-afac-03ed26d8b3d7/memcached/0.log" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.262217 5002 scope.go:117] "RemoveContainer" containerID="52cbce8f47bb0bfc28378a99fdd89d415d1b52ffcded64913d611cb314b66895" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.308189 5002 scope.go:117] "RemoveContainer" containerID="d3e8163d0b181d68d33c703c781c072b6418bfbcc16a8541e4c7d0984467b000" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.331526 5002 scope.go:117] "RemoveContainer" containerID="b45b06fc50de951c4a57e6f843f221cf0b7f8fd642684cb159e66e13b9acac0a" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.390687 5002 scope.go:117] "RemoveContainer" containerID="86d752b15508879d905147fa3af8eb85aa9fffd4a7b1afe7cce891aaa317352d" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.433934 5002 scope.go:117] "RemoveContainer" containerID="90eb3cb4281daad886bae4594391be43309b0d20752cb67f74a30f2858a2b5bd" Dec 03 18:23:49 crc kubenswrapper[5002]: I1203 18:23:49.841020 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:23:49 crc kubenswrapper[5002]: E1203 18:23:49.841236 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:24:00 crc kubenswrapper[5002]: I1203 18:24:00.840130 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:24:00 crc kubenswrapper[5002]: E1203 18:24:00.840925 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.367936 5002 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/util/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.508739 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/util/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.528887 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/pull/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.557525 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/pull/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.680976 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/util/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.705247 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/pull/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.719191 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_98dc3bd0b5c63de8bc52e3558b9d3e72fafafb6fd127fd2510d2206864l8k49_6f94aeac-86c4-4982-a520-1748553b83ac/extract/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.897529 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-bp8bt_fbb180bd-c957-4362-a7ac-04065940a34e/kube-rbac-proxy/0.log" Dec 03 18:24:03 crc kubenswrapper[5002]: I1203 18:24:03.996507 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-tjcc7_240013fb-5ef1-4923-8c16-3967ff1a47e8/kube-rbac-proxy/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.019282 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-bp8bt_fbb180bd-c957-4362-a7ac-04065940a34e/manager/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.134608 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-tjcc7_240013fb-5ef1-4923-8c16-3967ff1a47e8/manager/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.165651 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-w5vlv_7bb9718d-9129-421f-8f7e-8b5c8d7d9e53/kube-rbac-proxy/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.193061 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-w5vlv_7bb9718d-9129-421f-8f7e-8b5c8d7d9e53/manager/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.345081 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-s6d9w_a25d04de-e230-4750-a4c9-6a43bf344b9e/kube-rbac-proxy/0.log" Dec 
03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.494265 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-s6d9w_a25d04de-e230-4750-a4c9-6a43bf344b9e/manager/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.539308 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-rlh8w_f49045c6-275a-46b4-8f61-9fd85401869f/kube-rbac-proxy/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.567320 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-rlh8w_f49045c6-275a-46b4-8f61-9fd85401869f/manager/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.720717 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-b2xhv_af132426-0104-4abf-bad5-67615b919af7/kube-rbac-proxy/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.746908 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-b2xhv_af132426-0104-4abf-bad5-67615b919af7/manager/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.851780 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-95x4f_c23347dc-e104-4ca5-a132-60a102150117/kube-rbac-proxy/0.log" Dec 03 18:24:04 crc kubenswrapper[5002]: I1203 18:24:04.984029 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-8hlz6_a54f31fa-c7c6-48b0-90b9-df614b116b8c/kube-rbac-proxy/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.122017 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-8hlz6_a54f31fa-c7c6-48b0-90b9-df614b116b8c/manager/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.168284 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-95x4f_c23347dc-e104-4ca5-a132-60a102150117/manager/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.241381 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bfqzl_4b55dbea-68c7-4290-a698-068c741b22a6/kube-rbac-proxy/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.368440 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bfqzl_4b55dbea-68c7-4290-a698-068c741b22a6/manager/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.387174 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-7hcw5_ec47f682-ce44-4e37-980b-a5e1c1142284/kube-rbac-proxy/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.458588 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-7hcw5_ec47f682-ce44-4e37-980b-a5e1c1142284/manager/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.577474 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rm2fw_e4af56b3-88df-4902-b51a-1ed81ced8583/kube-rbac-proxy/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.600844 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rm2fw_e4af56b3-88df-4902-b51a-1ed81ced8583/manager/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.779459 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-gp7hz_5ef47d10-5c5b-4cd6-b194-753a9fcc1b54/kube-rbac-proxy/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.827404 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-gp7hz_5ef47d10-5c5b-4cd6-b194-753a9fcc1b54/manager/0.log" Dec 03 18:24:05 crc kubenswrapper[5002]: I1203 18:24:05.901645 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-ngjzc_43a120e7-09d2-4d8e-8acd-06b6ab38cc40/kube-rbac-proxy/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.049104 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cj9f5_0e786251-b94d-47b4-930f-5f2cac19cc52/kube-rbac-proxy/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.129172 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-ngjzc_43a120e7-09d2-4d8e-8acd-06b6ab38cc40/manager/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.363569 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cj9f5_0e786251-b94d-47b4-930f-5f2cac19cc52/manager/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.387755 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55d86b6686z66jk_127b2eae-6b24-45a7-871b-e9569b062e28/manager/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.434696 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55d86b6686z66jk_127b2eae-6b24-45a7-871b-e9569b062e28/kube-rbac-proxy/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.815153 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dd5c7bb7c-g9r6n_b7076603-1553-462f-ad95-bbb4b423a1cc/operator/0.log" Dec 03 18:24:06 crc kubenswrapper[5002]: I1203 18:24:06.921556 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-4j7h9_de13f869-61a0-48df-817f-e75d1d405308/registry-server/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.032605 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-bwfpx_cd874f14-4eb0-46ae-8968-4a52e4a3bc69/kube-rbac-proxy/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.133309 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-bwfpx_cd874f14-4eb0-46ae-8968-4a52e4a3bc69/manager/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.221988 5002 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-lfdk7_b098b672-2320-4ba6-bd50-2237cb576d80/kube-rbac-proxy/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.345553 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-lfdk7_b098b672-2320-4ba6-bd50-2237cb576d80/manager/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.466339 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-zgpvf_84f51195-36c1-4039-af68-b0643b7c27e5/operator/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.585887 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-jbfdk_97caeb4d-2742-41fb-ac4f-440555163d81/kube-rbac-proxy/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.684813 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-jbfdk_97caeb4d-2742-41fb-ac4f-440555163d81/manager/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.766670 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-4rqsk_395c93a8-4649-4ddf-b630-d9982c670991/kube-rbac-proxy/0.log" Dec 03 18:24:07 crc kubenswrapper[5002]: I1203 18:24:07.971696 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-w2vkv_6fc5b30e-4458-4d0e-b476-107c4f92f56a/kube-rbac-proxy/0.log" Dec 03 18:24:08 crc kubenswrapper[5002]: I1203 18:24:08.001461 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-4rqsk_395c93a8-4649-4ddf-b630-d9982c670991/manager/0.log" Dec 03 18:24:08 crc kubenswrapper[5002]: I1203 18:24:08.002205 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-w2vkv_6fc5b30e-4458-4d0e-b476-107c4f92f56a/manager/0.log" Dec 03 18:24:08 crc kubenswrapper[5002]: I1203 18:24:08.166692 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-8jrq7_4ddf28f1-4702-477d-b1d8-4f6758b4dc9a/manager/0.log" Dec 03 18:24:08 crc kubenswrapper[5002]: I1203 18:24:08.214583 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-8jrq7_4ddf28f1-4702-477d-b1d8-4f6758b4dc9a/kube-rbac-proxy/0.log" Dec 03 18:24:08 crc kubenswrapper[5002]: I1203 18:24:08.521168 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-9f56fc979-b5vdg_c295c3ec-fe2b-4ae0-a818-6847d923dc1d/manager/0.log" Dec 03 18:24:13 crc kubenswrapper[5002]: I1203 18:24:13.840893 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:24:13 crc kubenswrapper[5002]: E1203 18:24:13.841708 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:24:24 crc kubenswrapper[5002]: I1203 18:24:24.841535 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:24:24 crc kubenswrapper[5002]: E1203 18:24:24.842712 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:24:27 crc kubenswrapper[5002]: I1203 18:24:27.524435 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-d5jnd_3ffcb996-f277-4b13-942a-ff911dcf1899/control-plane-machine-set-operator/0.log" Dec 03 18:24:27 crc kubenswrapper[5002]: I1203 18:24:27.686090 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-8xg65_6e8cf598-b803-4504-a472-49efee59fd59/kube-rbac-proxy/0.log" Dec 03 18:24:27 crc kubenswrapper[5002]: I1203 18:24:27.711226 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-8xg65_6e8cf598-b803-4504-a472-49efee59fd59/machine-api-operator/0.log" Dec 03 18:24:38 crc kubenswrapper[5002]: I1203 18:24:38.841045 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:24:38 crc kubenswrapper[5002]: E1203 18:24:38.841903 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:24:40 crc kubenswrapper[5002]: I1203 18:24:40.693714 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-7lvds_fd9abb89-d8f6-4d1b-9a15-e600de7edaed/cert-manager-controller/0.log" Dec 03 18:24:40 crc kubenswrapper[5002]: I1203 18:24:40.798874 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-sll6f_549ee4dc-b28a-48da-ad9e-4155cdfb67d5/cert-manager-cainjector/0.log" Dec 03 18:24:40 crc kubenswrapper[5002]: I1203 18:24:40.898016 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-vlmzm_30a62dfc-673c-4360-97a3-bb603b87aeab/cert-manager-webhook/0.log" Dec 03 18:24:52 crc kubenswrapper[5002]: I1203 18:24:52.841652 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:24:52 crc kubenswrapper[5002]: E1203 18:24:52.845073 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:24:53 crc kubenswrapper[5002]: I1203 18:24:53.778642 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-tnw4z_6c0d4c8e-606d-4a79-85da-56d503115dde/nmstate-console-plugin/0.log" Dec 03 18:24:53 crc kubenswrapper[5002]: I1203 18:24:53.967576 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-rd64m_b952bb42-1df0-4837-9c7e-ab25b7949f89/nmstate-handler/0.log" Dec 03 18:24:53 crc kubenswrapper[5002]: I1203 18:24:53.975470 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-w2kzn_a3a0627d-e103-43bc-a5d6-0933fc925543/kube-rbac-proxy/0.log" Dec 03 18:24:54 crc kubenswrapper[5002]: I1203 18:24:54.080612 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-w2kzn_a3a0627d-e103-43bc-a5d6-0933fc925543/nmstate-metrics/0.log" Dec 03 18:24:54 crc kubenswrapper[5002]: I1203 18:24:54.150524 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-9hq4g_0c279a3b-ffa8-4136-9932-f483512bbb7c/nmstate-operator/0.log" Dec 03 18:24:54 crc kubenswrapper[5002]: I1203 18:24:54.278764 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-tv2qn_55e48020-7db5-4f57-8c21-0dec9e03ef5c/nmstate-webhook/0.log" Dec 03 18:25:06 crc kubenswrapper[5002]: I1203 18:25:06.857578 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:25:06 crc kubenswrapper[5002]: E1203 18:25:06.871565 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:25:09 crc kubenswrapper[5002]: I1203 18:25:09.751811 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-wqxqr_6f46647a-b329-42a7-9372-12ffde9fbb5f/kube-rbac-proxy/0.log" Dec 03 18:25:09 crc kubenswrapper[5002]: I1203 18:25:09.978238 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-wvmbf_6c695792-74ee-418f-92fd-4bcb18beeb5d/frr-k8s-webhook-server/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.017619 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-wqxqr_6f46647a-b329-42a7-9372-12ffde9fbb5f/controller/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.103231 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-frr-files/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.327090 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-frr-files/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.338325 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-reloader/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.386622 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-reloader/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.394737 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-metrics/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.535767 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-frr-files/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.541081 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-reloader/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.612458 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-metrics/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.620366 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-metrics/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.841998 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-metrics/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.842991 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/controller/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.843613 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-frr-files/0.log" Dec 03 18:25:10 crc kubenswrapper[5002]: I1203 18:25:10.843696 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/cp-reloader/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.028100 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/frr-metrics/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.076837 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/kube-rbac-proxy-frr/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.082338 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/kube-rbac-proxy/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.282240 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/reloader/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.297113 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5c867b68cc-728xc_d700b14d-9221-4fea-b580-6bab5def0a78/manager/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.524432 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-8667694fb-cwcq5_3150e99c-fa83-4bd5-9c80-45e124725b16/webhook-server/0.log" Dec 03 18:25:11 crc kubenswrapper[5002]: I1203 18:25:11.771014 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-j7qgc_1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4/kube-rbac-proxy/0.log" Dec 03 18:25:12 crc kubenswrapper[5002]: I1203 18:25:12.640492 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-j7qgc_1c063a3c-3478-45aa-afc1-ad6d7fe1d6e4/speaker/0.log" Dec 03 18:25:13 crc kubenswrapper[5002]: I1203 18:25:13.647865 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wxdxf_6c74eee1-29ec-4886-ada2-083436d4dc82/frr/0.log" Dec 03 18:25:18 crc kubenswrapper[5002]: I1203 18:25:18.840982 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:25:18 crc kubenswrapper[5002]: E1203 18:25:18.842036 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:25:26 crc kubenswrapper[5002]: I1203 18:25:26.498600 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/util/0.log" Dec 03 18:25:26 crc kubenswrapper[5002]: I1203 18:25:26.786476 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/pull/0.log" Dec 03 18:25:26 crc kubenswrapper[5002]: I1203 18:25:26.787990 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/util/0.log" Dec 03 18:25:26 crc kubenswrapper[5002]: I1203 18:25:26.808717 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/pull/0.log" Dec 03 18:25:26 crc kubenswrapper[5002]: I1203 18:25:26.979375 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/util/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.022408 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/extract/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.048490 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axqknh_4dc1e514-8145-48e7-b668-0360bade0043/pull/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.174081 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/util/0.log" Dec 03 
18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.358263 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/pull/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.362998 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/util/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.377359 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/pull/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.550834 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/util/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.576221 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/pull/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.618178 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmmtnh_5e70649f-5753-43d7-8641-572e9ab62148/extract/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.757865 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/util/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.941430 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/util/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.954857 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/pull/0.log" Dec 03 18:25:27 crc kubenswrapper[5002]: I1203 18:25:27.978061 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/pull/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.181505 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/util/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.199910 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/extract/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.202432 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210ntprh_eed6bf02-5a3f-401a-9e18-72374acb6931/pull/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.410284 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/util/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.536115 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/pull/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.545244 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/pull/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.546135 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/util/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.749709 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/extract/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.750884 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/util/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.751541 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83xcdm5_389ed0b8-f8b8-4949-baf8-83c696910edf/pull/0.log" Dec 03 18:25:28 crc kubenswrapper[5002]: I1203 18:25:28.907951 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/extract-utilities/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.110871 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/extract-content/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.134843 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/extract-content/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.161886 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/extract-utilities/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.344837 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/extract-content/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.455295 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/extract-utilities/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.568378 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/extract-utilities/0.log" Dec 03 18:25:29 crc kubenswrapper[5002]: I1203 18:25:29.879108 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/extract-content/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.147024 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/extract-content/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.150836 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l74ff_9fc8c2ba-1630-4282-8fe1-82ac6c89d2ad/registry-server/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.162958 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/extract-utilities/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.262458 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/extract-utilities/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.398529 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/extract-content/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.482191 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-4qv6m_01e55b47-d730-453c-a240-6e6aeda14dc1/marketplace-operator/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.612067 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/extract-utilities/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.832650 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/extract-utilities/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.871734 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/extract-content/0.log" Dec 03 18:25:30 crc kubenswrapper[5002]: I1203 18:25:30.872005 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/extract-content/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.108906 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/extract-content/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.162507 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/extract-utilities/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.170604 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pqsn9_6f02b254-7cb6-486d-87e8-6de21f6f176c/registry-server/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.362061 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/extract-utilities/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.400001 5002 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-nd4c5_51d47103-19b4-4321-b9b3-45eff71e42ea/registry-server/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.537333 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/extract-content/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.553942 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/extract-utilities/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.587685 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/extract-content/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.747529 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/extract-utilities/0.log" Dec 03 18:25:31 crc kubenswrapper[5002]: I1203 18:25:31.778980 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/extract-content/0.log" Dec 03 18:25:32 crc kubenswrapper[5002]: I1203 18:25:32.812705 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vjcrp_14775129-525c-4d0d-9ba5-d28b6066c8ba/registry-server/0.log" Dec 03 18:25:32 crc kubenswrapper[5002]: I1203 18:25:32.846735 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:25:32 crc kubenswrapper[5002]: E1203 18:25:32.847141 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:25:45 crc kubenswrapper[5002]: I1203 18:25:45.679104 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-5s8x4_427005c4-4ead-4ac2-b151-21f74eac9b18/prometheus-operator/0.log" Dec 03 18:25:45 crc kubenswrapper[5002]: I1203 18:25:45.888827 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-848b69754c-lzv9c_5bb2698d-64e4-4595-aa89-b4fb8751109c/prometheus-operator-admission-webhook/0.log" Dec 03 18:25:45 crc kubenswrapper[5002]: I1203 18:25:45.951816 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-848b69754c-qxxsx_b32b9e1f-8c29-47c6-b0c1-e4389e55422a/prometheus-operator-admission-webhook/0.log" Dec 03 18:25:46 crc kubenswrapper[5002]: I1203 18:25:46.092452 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-mblh2_40a6500c-ca6c-4b78-a9f5-cab04e7fae29/operator/0.log" Dec 03 18:25:46 crc kubenswrapper[5002]: I1203 18:25:46.119351 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-b6zsg_1776756e-524b-4933-98d6-375c71008ac4/perses-operator/0.log" Dec 03 18:25:47 crc kubenswrapper[5002]: I1203 
18:25:47.841677 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:25:47 crc kubenswrapper[5002]: E1203 18:25:47.842032 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:25:49 crc kubenswrapper[5002]: I1203 18:25:49.584228 5002 scope.go:117] "RemoveContainer" containerID="d70907bc3bde979acd4451a27bc2559c5044f813c594ca4d4641eb6375b7ef80" Dec 03 18:25:58 crc kubenswrapper[5002]: I1203 18:25:58.840626 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:25:58 crc kubenswrapper[5002]: E1203 18:25:58.841424 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:26:13 crc kubenswrapper[5002]: I1203 18:26:13.840824 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:26:13 crc kubenswrapper[5002]: E1203 18:26:13.841523 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:26:15 crc kubenswrapper[5002]: E1203 18:26:15.001805 5002 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.155:40050->38.102.83.155:42635: write tcp 38.102.83.155:40050->38.102.83.155:42635: write: broken pipe Dec 03 18:26:24 crc kubenswrapper[5002]: I1203 18:26:24.840694 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a" Dec 03 18:26:24 crc kubenswrapper[5002]: E1203 18:26:24.842157 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" Dec 03 18:26:29 crc kubenswrapper[5002]: I1203 18:26:29.625991 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-7hcw5" podUID="ec47f682-ce44-4e37-980b-a5e1c1142284" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.83:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 18:26:30 crc kubenswrapper[5002]: I1203 
18:26:30.167918 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="metallb-system/frr-k8s-wxdxf" podUID="6c74eee1-29ec-4886-ada2-083436d4dc82" containerName="controller" probeResult="failure" output="Get \"http://127.0.0.1:7572/metrics\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.169950 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5j9fh"]
Dec 03 18:26:34 crc kubenswrapper[5002]: E1203 18:26:34.170972 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f2d987d-d6ea-475b-bbab-044267f7f264" containerName="container-00"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.170990 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f2d987d-d6ea-475b-bbab-044267f7f264" containerName="container-00"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.171238 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f2d987d-d6ea-475b-bbab-044267f7f264" containerName="container-00"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.173614 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.193586 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5j9fh"]
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.344306 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-catalog-content\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.344352 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4dv8\" (UniqueName: \"kubernetes.io/projected/b875627b-877c-473d-96a8-ca6e2ca30c24-kube-api-access-l4dv8\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.344857 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-utilities\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.447526 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-utilities\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.447643 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4dv8\" (UniqueName: \"kubernetes.io/projected/b875627b-877c-473d-96a8-ca6e2ca30c24-kube-api-access-l4dv8\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.447664 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-catalog-content\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.448050 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-utilities\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.448239 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-catalog-content\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.473699 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4dv8\" (UniqueName: \"kubernetes.io/projected/b875627b-877c-473d-96a8-ca6e2ca30c24-kube-api-access-l4dv8\") pod \"redhat-operators-5j9fh\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") " pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:34 crc kubenswrapper[5002]: I1203 18:26:34.502124 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:35 crc kubenswrapper[5002]: I1203 18:26:35.563801 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5j9fh"]
Dec 03 18:26:36 crc kubenswrapper[5002]: I1203 18:26:36.256809 5002 generic.go:334] "Generic (PLEG): container finished" podID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerID="392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8" exitCode=0
Dec 03 18:26:36 crc kubenswrapper[5002]: I1203 18:26:36.256983 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerDied","Data":"392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8"}
Dec 03 18:26:36 crc kubenswrapper[5002]: I1203 18:26:36.257084 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerStarted","Data":"afda43488be897f3a284f95161c408bbc1478dc991f79b8ba43b42486a56670c"}
Dec 03 18:26:38 crc kubenswrapper[5002]: I1203 18:26:38.275840 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerStarted","Data":"20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca"}
Dec 03 18:26:38 crc kubenswrapper[5002]: I1203 18:26:38.844739 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:26:38 crc kubenswrapper[5002]: E1203 18:26:38.845013 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:26:43 crc kubenswrapper[5002]: I1203 18:26:43.329174 5002 generic.go:334] "Generic (PLEG): container finished" podID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerID="20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca" exitCode=0
Dec 03 18:26:43 crc kubenswrapper[5002]: I1203 18:26:43.329226 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerDied","Data":"20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca"}
Dec 03 18:26:44 crc kubenswrapper[5002]: I1203 18:26:44.341047 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerStarted","Data":"4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3"}
Dec 03 18:26:44 crc kubenswrapper[5002]: I1203 18:26:44.362796 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5j9fh" podStartSLOduration=2.921465223 podStartE2EDuration="10.362777638s" podCreationTimestamp="2025-12-03 18:26:34 +0000 UTC" firstStartedPulling="2025-12-03 18:26:36.258614307 +0000 UTC m=+6919.672436195" lastFinishedPulling="2025-12-03 18:26:43.699926722 +0000 UTC m=+6927.113748610" observedRunningTime="2025-12-03 18:26:44.354793683 +0000 UTC m=+6927.768615591" watchObservedRunningTime="2025-12-03 18:26:44.362777638 +0000 UTC m=+6927.776599516"
Dec 03 18:26:44 crc kubenswrapper[5002]: I1203 18:26:44.502717 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:44 crc kubenswrapper[5002]: I1203 18:26:44.502780 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:45 crc kubenswrapper[5002]: I1203 18:26:45.550201 5002 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5j9fh" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="registry-server" probeResult="failure" output=<
Dec 03 18:26:45 crc kubenswrapper[5002]: timeout: failed to connect service ":50051" within 1s
Dec 03 18:26:45 crc kubenswrapper[5002]: >
Dec 03 18:26:53 crc kubenswrapper[5002]: I1203 18:26:53.842006 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:26:53 crc kubenswrapper[5002]: E1203 18:26:53.843206 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:26:54 crc kubenswrapper[5002]: I1203 18:26:54.584272 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:54 crc kubenswrapper[5002]: I1203 18:26:54.649019 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:54 crc kubenswrapper[5002]: I1203 18:26:54.826303 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5j9fh"]
Dec 03 18:26:56 crc kubenswrapper[5002]: I1203 18:26:56.458262 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5j9fh" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="registry-server" containerID="cri-o://4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3" gracePeriod=2
Dec 03 18:26:56 crc kubenswrapper[5002]: I1203 18:26:56.992819 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.121782 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-catalog-content\") pod \"b875627b-877c-473d-96a8-ca6e2ca30c24\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") "
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.122090 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-utilities\") pod \"b875627b-877c-473d-96a8-ca6e2ca30c24\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") "
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.122145 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4dv8\" (UniqueName: \"kubernetes.io/projected/b875627b-877c-473d-96a8-ca6e2ca30c24-kube-api-access-l4dv8\") pod \"b875627b-877c-473d-96a8-ca6e2ca30c24\" (UID: \"b875627b-877c-473d-96a8-ca6e2ca30c24\") "
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.124761 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-utilities" (OuterVolumeSpecName: "utilities") pod "b875627b-877c-473d-96a8-ca6e2ca30c24" (UID: "b875627b-877c-473d-96a8-ca6e2ca30c24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.128330 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b875627b-877c-473d-96a8-ca6e2ca30c24-kube-api-access-l4dv8" (OuterVolumeSpecName: "kube-api-access-l4dv8") pod "b875627b-877c-473d-96a8-ca6e2ca30c24" (UID: "b875627b-877c-473d-96a8-ca6e2ca30c24"). InnerVolumeSpecName "kube-api-access-l4dv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.223992 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b875627b-877c-473d-96a8-ca6e2ca30c24" (UID: "b875627b-877c-473d-96a8-ca6e2ca30c24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.224781 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.224802 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4dv8\" (UniqueName: \"kubernetes.io/projected/b875627b-877c-473d-96a8-ca6e2ca30c24-kube-api-access-l4dv8\") on node \"crc\" DevicePath \"\""
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.224813 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b875627b-877c-473d-96a8-ca6e2ca30c24-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.471933 5002 generic.go:334] "Generic (PLEG): container finished" podID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerID="4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3" exitCode=0
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.472007 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerDied","Data":"4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3"}
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.472373 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5j9fh" event={"ID":"b875627b-877c-473d-96a8-ca6e2ca30c24","Type":"ContainerDied","Data":"afda43488be897f3a284f95161c408bbc1478dc991f79b8ba43b42486a56670c"}
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.472064 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5j9fh"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.472403 5002 scope.go:117] "RemoveContainer" containerID="4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.498929 5002 scope.go:117] "RemoveContainer" containerID="20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.534260 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5j9fh"]
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.540297 5002 scope.go:117] "RemoveContainer" containerID="392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.543106 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5j9fh"]
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.590263 5002 scope.go:117] "RemoveContainer" containerID="4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3"
Dec 03 18:26:57 crc kubenswrapper[5002]: E1203 18:26:57.590727 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3\": container with ID starting with 4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3 not found: ID does not exist" containerID="4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.590781 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3"} err="failed to get container status \"4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3\": rpc error: code = NotFound desc = could not find container \"4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3\": container with ID starting with 4e56fa08f699ea45f01654948864dce0f0e2a78140db722d4bc9cbda14aed1f3 not found: ID does not exist"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.590805 5002 scope.go:117] "RemoveContainer" containerID="20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca"
Dec 03 18:26:57 crc kubenswrapper[5002]: E1203 18:26:57.591105 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca\": container with ID starting with 20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca not found: ID does not exist" containerID="20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.591141 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca"} err="failed to get container status \"20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca\": rpc error: code = NotFound desc = could not find container \"20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca\": container with ID starting with 20f7e9b78ff9da9cc7aa25aa21fff97bf578d8ab6fe00ff83d72c7da773879ca not found: ID does not exist"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.591165 5002 scope.go:117] "RemoveContainer" containerID="392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8"
Dec 03 18:26:57 crc kubenswrapper[5002]: E1203 18:26:57.591364 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8\": container with ID starting with 392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8 not found: ID does not exist" containerID="392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8"
Dec 03 18:26:57 crc kubenswrapper[5002]: I1203 18:26:57.591386 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8"} err="failed to get container status \"392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8\": rpc error: code = NotFound desc = could not find container \"392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8\": container with ID starting with 392deff5a17120ab6e9bf621b0cf2e34129fea3ba20032052a7e857ef89b50a8 not found: ID does not exist"
Dec 03 18:26:58 crc kubenswrapper[5002]: I1203 18:26:58.853683 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" path="/var/lib/kubelet/pods/b875627b-877c-473d-96a8-ca6e2ca30c24/volumes"
Dec 03 18:27:08 crc kubenswrapper[5002]: I1203 18:27:08.840952 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:27:08 crc kubenswrapper[5002]: E1203 18:27:08.842134 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:27:12 crc kubenswrapper[5002]: I1203 18:27:12.068910 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-szwjx"]
Dec 03 18:27:12 crc kubenswrapper[5002]: I1203 18:27:12.087667 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-szwjx"]
Dec 03 18:27:12 crc kubenswrapper[5002]: I1203 18:27:12.862482 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4973cbe-4313-4bdd-af74-81dade285f65" path="/var/lib/kubelet/pods/e4973cbe-4313-4bdd-af74-81dade285f65/volumes"
Dec 03 18:27:13 crc kubenswrapper[5002]: I1203 18:27:13.028949 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-70e9-account-create-update-sp89s"]
Dec 03 18:27:13 crc kubenswrapper[5002]: I1203 18:27:13.038616 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-70e9-account-create-update-sp89s"]
Dec 03 18:27:14 crc kubenswrapper[5002]: I1203 18:27:14.854594 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a49549ac-d109-4567-b79c-a2df131387aa" path="/var/lib/kubelet/pods/a49549ac-d109-4567-b79c-a2df131387aa/volumes"
Dec 03 18:27:18 crc kubenswrapper[5002]: I1203 18:27:18.747217 5002 generic.go:334] "Generic (PLEG): container finished" podID="5779a321-8268-424b-bef2-08e0dd158ebc" containerID="656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c" exitCode=0
Dec 03 18:27:18 crc kubenswrapper[5002]: I1203 18:27:18.747302 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" event={"ID":"5779a321-8268-424b-bef2-08e0dd158ebc","Type":"ContainerDied","Data":"656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"}
Dec 03 18:27:18 crc kubenswrapper[5002]: I1203 18:27:18.749613 5002 scope.go:117] "RemoveContainer" containerID="656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"
Dec 03 18:27:19 crc kubenswrapper[5002]: I1203 18:27:19.750426 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pjrqh_must-gather-vsg9p_5779a321-8268-424b-bef2-08e0dd158ebc/gather/0.log"
Dec 03 18:27:19 crc kubenswrapper[5002]: I1203 18:27:19.841882 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:27:19 crc kubenswrapper[5002]: E1203 18:27:19.842193 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:27:26 crc kubenswrapper[5002]: I1203 18:27:26.052755 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-tdkwj"]
Dec 03 18:27:26 crc kubenswrapper[5002]: I1203 18:27:26.065846 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-tdkwj"]
Dec 03 18:27:26 crc kubenswrapper[5002]: I1203 18:27:26.853625 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94a46076-2c6c-48fe-8ec3-6b239ab5aa55" path="/var/lib/kubelet/pods/94a46076-2c6c-48fe-8ec3-6b239ab5aa55/volumes"
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.116534 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pjrqh/must-gather-vsg9p"]
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.117233 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-pjrqh/must-gather-vsg9p" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="copy" containerID="cri-o://3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c" gracePeriod=2
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.129777 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pjrqh/must-gather-vsg9p"]
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.661099 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pjrqh_must-gather-vsg9p_5779a321-8268-424b-bef2-08e0dd158ebc/copy/0.log"
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.662178 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pjrqh/must-gather-vsg9p"
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.761925 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwhxn\" (UniqueName: \"kubernetes.io/projected/5779a321-8268-424b-bef2-08e0dd158ebc-kube-api-access-mwhxn\") pod \"5779a321-8268-424b-bef2-08e0dd158ebc\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") "
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.762051 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5779a321-8268-424b-bef2-08e0dd158ebc-must-gather-output\") pod \"5779a321-8268-424b-bef2-08e0dd158ebc\" (UID: \"5779a321-8268-424b-bef2-08e0dd158ebc\") "
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.769025 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5779a321-8268-424b-bef2-08e0dd158ebc-kube-api-access-mwhxn" (OuterVolumeSpecName: "kube-api-access-mwhxn") pod "5779a321-8268-424b-bef2-08e0dd158ebc" (UID: "5779a321-8268-424b-bef2-08e0dd158ebc"). InnerVolumeSpecName "kube-api-access-mwhxn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.847670 5002 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pjrqh_must-gather-vsg9p_5779a321-8268-424b-bef2-08e0dd158ebc/copy/0.log"
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.848056 5002 generic.go:334] "Generic (PLEG): container finished" podID="5779a321-8268-424b-bef2-08e0dd158ebc" containerID="3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c" exitCode=143
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.848124 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pjrqh/must-gather-vsg9p"
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.867316 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwhxn\" (UniqueName: \"kubernetes.io/projected/5779a321-8268-424b-bef2-08e0dd158ebc-kube-api-access-mwhxn\") on node \"crc\" DevicePath \"\""
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.872519 5002 scope.go:117] "RemoveContainer" containerID="3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c"
Dec 03 18:27:28 crc kubenswrapper[5002]: I1203 18:27:28.909969 5002 scope.go:117] "RemoveContainer" containerID="656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"
Dec 03 18:27:29 crc kubenswrapper[5002]: I1203 18:27:29.021960 5002 scope.go:117] "RemoveContainer" containerID="3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c"
Dec 03 18:27:29 crc kubenswrapper[5002]: E1203 18:27:29.030883 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c\": container with ID starting with 3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c not found: ID does not exist" containerID="3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c"
Dec 03 18:27:29 crc kubenswrapper[5002]: I1203 18:27:29.030930 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c"} err="failed to get container status \"3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c\": rpc error: code = NotFound desc = could not find container \"3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c\": container with ID starting with 3847bd462163ceabab39528a8f8c49a889d1fa1feafe0f96232c710fba9d710c not found: ID does not exist"
Dec 03 18:27:29 crc kubenswrapper[5002]: I1203 18:27:29.030954 5002 scope.go:117] "RemoveContainer" containerID="656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"
Dec 03 18:27:29 crc kubenswrapper[5002]: E1203 18:27:29.040711 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c\": container with ID starting with 656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c not found: ID does not exist" containerID="656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"
Dec 03 18:27:29 crc kubenswrapper[5002]: I1203 18:27:29.040780 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c"} err="failed to get container status \"656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c\": rpc error: code = NotFound desc = could not find container \"656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c\": container with ID starting with 656a734abf020aa767bf02ffe76f2176d4e337da415af46d4315a16605336c5c not found: ID does not exist"
Dec 03 18:27:29 crc kubenswrapper[5002]: I1203 18:27:29.066274 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5779a321-8268-424b-bef2-08e0dd158ebc-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5779a321-8268-424b-bef2-08e0dd158ebc" (UID: "5779a321-8268-424b-bef2-08e0dd158ebc"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:27:29 crc kubenswrapper[5002]: I1203 18:27:29.077129 5002 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5779a321-8268-424b-bef2-08e0dd158ebc-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 03 18:27:30 crc kubenswrapper[5002]: I1203 18:27:30.840537 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:27:30 crc kubenswrapper[5002]: E1203 18:27:30.841245 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:27:30 crc kubenswrapper[5002]: I1203 18:27:30.853473 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" path="/var/lib/kubelet/pods/5779a321-8268-424b-bef2-08e0dd158ebc/volumes"
Dec 03 18:27:42 crc kubenswrapper[5002]: I1203 18:27:42.841144 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:27:42 crc kubenswrapper[5002]: E1203 18:27:42.841943 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.390679 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7rtk6"]
Dec 03 18:27:47 crc kubenswrapper[5002]: E1203 18:27:47.391878 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="extract-content"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.391897 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="extract-content"
Dec 03 18:27:47 crc kubenswrapper[5002]: E1203 18:27:47.391923 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="gather"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.391932 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="gather"
Dec 03 18:27:47 crc kubenswrapper[5002]: E1203 18:27:47.391949 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="registry-server"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.391956 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="registry-server"
Dec 03 18:27:47 crc kubenswrapper[5002]: E1203 18:27:47.391966 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="copy"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.391974 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="copy"
Dec 03 18:27:47 crc kubenswrapper[5002]: E1203 18:27:47.391984 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="extract-utilities"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.391993 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="extract-utilities"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.392234 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="b875627b-877c-473d-96a8-ca6e2ca30c24" containerName="registry-server"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.392255 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="copy"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.392278 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="5779a321-8268-424b-bef2-08e0dd158ebc" containerName="gather"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.394234 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.407538 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7rtk6"]
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.429530 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcwfg\" (UniqueName: \"kubernetes.io/projected/315fe50e-953c-47b3-ba6d-d2a1aef6133a-kube-api-access-xcwfg\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.429682 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-utilities\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.429822 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-catalog-content\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.531919 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-catalog-content\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.532005 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcwfg\" (UniqueName: \"kubernetes.io/projected/315fe50e-953c-47b3-ba6d-d2a1aef6133a-kube-api-access-xcwfg\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.532114 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-utilities\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.532854 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-catalog-content\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.532876 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-utilities\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.553796 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcwfg\" (UniqueName: \"kubernetes.io/projected/315fe50e-953c-47b3-ba6d-d2a1aef6133a-kube-api-access-xcwfg\") pod \"community-operators-7rtk6\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") " pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:47 crc kubenswrapper[5002]: I1203 18:27:47.723434 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:48 crc kubenswrapper[5002]: I1203 18:27:48.324085 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7rtk6"]
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.088111 5002 generic.go:334] "Generic (PLEG): container finished" podID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerID="13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1" exitCode=0
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.088445 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerDied","Data":"13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1"}
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.088479 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerStarted","Data":"47d0403dc67ed7e09242556463b64b987b5e59b12bb8164b83abf824065b7d34"}
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.090510 5002 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.712190 5002 scope.go:117] "RemoveContainer" containerID="f3862aa55aeddc92a972ce94167f7ec087c01f2148302b6b220f5fea0c158742"
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.740526 5002 scope.go:117] "RemoveContainer" containerID="dcc71c7b1dcd7d2737a9dc04e7b4f091a08253f1d4cd064c05330cbfe0ab3d5d"
Dec 03 18:27:49 crc kubenswrapper[5002]: I1203 18:27:49.818449 5002 scope.go:117] "RemoveContainer" containerID="60796b4ea23d163d7fae93c3070ed6269d66a1216cb3ed1686c19c10467c35fe"
Dec 03 18:27:51 crc kubenswrapper[5002]: I1203 18:27:51.114857 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerStarted","Data":"23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3"}
Dec 03 18:27:53 crc kubenswrapper[5002]: I1203 18:27:53.139674 5002 generic.go:334] "Generic (PLEG): container finished" podID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerID="23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3" exitCode=0
Dec 03 18:27:53 crc kubenswrapper[5002]: I1203 18:27:53.139710 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerDied","Data":"23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3"}
Dec 03 18:27:55 crc kubenswrapper[5002]: I1203 18:27:55.161807 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerStarted","Data":"9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51"}
Dec 03 18:27:55 crc kubenswrapper[5002]: I1203 18:27:55.185421 5002 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7rtk6" podStartSLOduration=2.997377422 podStartE2EDuration="8.185372973s" podCreationTimestamp="2025-12-03 18:27:47 +0000 UTC" firstStartedPulling="2025-12-03 18:27:49.09010826 +0000 UTC m=+6992.503930188" lastFinishedPulling="2025-12-03 18:27:54.278103851 +0000 UTC m=+6997.691925739" observedRunningTime="2025-12-03 18:27:55.183476552 +0000 UTC m=+6998.597298460" watchObservedRunningTime="2025-12-03 18:27:55.185372973 +0000 UTC m=+6998.599194871"
Dec 03 18:27:55 crc kubenswrapper[5002]: I1203 18:27:55.840145 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:27:55 crc kubenswrapper[5002]: E1203 18:27:55.840486 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:27:57 crc kubenswrapper[5002]: I1203 18:27:57.723540 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:57 crc kubenswrapper[5002]: I1203 18:27:57.723844 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:27:57 crc kubenswrapper[5002]: I1203 18:27:57.784297 5002 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:28:07 crc kubenswrapper[5002]: I1203 18:28:07.797040 5002 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:28:07 crc kubenswrapper[5002]: I1203 18:28:07.849318 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7rtk6"]
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.283559 5002 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7rtk6" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="registry-server" containerID="cri-o://9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51" gracePeriod=2
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.764392 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.842642 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:28:08 crc kubenswrapper[5002]: E1203 18:28:08.843958 5002 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-bzb7f_openshift-machine-config-operator(c1d64ada-fbf9-4b0e-abb6-9b29bfec7309)\"" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309"
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.906734 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcwfg\" (UniqueName: \"kubernetes.io/projected/315fe50e-953c-47b3-ba6d-d2a1aef6133a-kube-api-access-xcwfg\") pod \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") "
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.907121 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-catalog-content\") pod \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") "
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.907390 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-utilities\") pod \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\" (UID: \"315fe50e-953c-47b3-ba6d-d2a1aef6133a\") "
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.908166 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-utilities" (OuterVolumeSpecName: "utilities") pod "315fe50e-953c-47b3-ba6d-d2a1aef6133a" (UID: "315fe50e-953c-47b3-ba6d-d2a1aef6133a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.908433 5002 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.928690 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/315fe50e-953c-47b3-ba6d-d2a1aef6133a-kube-api-access-xcwfg" (OuterVolumeSpecName: "kube-api-access-xcwfg") pod "315fe50e-953c-47b3-ba6d-d2a1aef6133a" (UID: "315fe50e-953c-47b3-ba6d-d2a1aef6133a"). InnerVolumeSpecName "kube-api-access-xcwfg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:28:08 crc kubenswrapper[5002]: I1203 18:28:08.983857 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "315fe50e-953c-47b3-ba6d-d2a1aef6133a" (UID: "315fe50e-953c-47b3-ba6d-d2a1aef6133a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.012232 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcwfg\" (UniqueName: \"kubernetes.io/projected/315fe50e-953c-47b3-ba6d-d2a1aef6133a-kube-api-access-xcwfg\") on node \"crc\" DevicePath \"\""
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.012276 5002 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/315fe50e-953c-47b3-ba6d-d2a1aef6133a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.299335 5002 generic.go:334] "Generic (PLEG): container finished" podID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerID="9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51" exitCode=0
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.299400 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerDied","Data":"9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51"}
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.299450 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rtk6" event={"ID":"315fe50e-953c-47b3-ba6d-d2a1aef6133a","Type":"ContainerDied","Data":"47d0403dc67ed7e09242556463b64b987b5e59b12bb8164b83abf824065b7d34"}
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.299485 5002 scope.go:117] "RemoveContainer" containerID="9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.299793 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rtk6"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.350993 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7rtk6"]
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.351861 5002 scope.go:117] "RemoveContainer" containerID="23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.361608 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7rtk6"]
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.376804 5002 scope.go:117] "RemoveContainer" containerID="13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.419643 5002 scope.go:117] "RemoveContainer" containerID="9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51"
Dec 03 18:28:09 crc kubenswrapper[5002]: E1203 18:28:09.420262 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51\": container with ID starting with 9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51 not found: ID does not exist" containerID="9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.420310 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51"} err="failed to get container status \"9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51\": rpc error: code = NotFound desc = could not find container \"9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51\": container with ID starting with 9a090b42948a23ff78a7f5aca98bdd6113882f3898780ad54c74aeff85029c51 not found: ID does not exist"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.420335 5002 scope.go:117] "RemoveContainer" containerID="23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3"
Dec 03 18:28:09 crc kubenswrapper[5002]: E1203 18:28:09.421512 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3\": container with ID starting with 23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3 not found: ID does not exist" containerID="23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.421557 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3"} err="failed to get container status \"23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3\": rpc error: code = NotFound desc = could not find container \"23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3\": container with ID starting with 23196302becc0102484eafee4871ccf373ffc679f617a2e40c6bce0d7c4ec6b3 not found: ID does not exist"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.421584 5002 scope.go:117] "RemoveContainer" containerID="13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1"
Dec 03 18:28:09 crc kubenswrapper[5002]: E1203 18:28:09.421957 5002 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1\": container with ID starting with 13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1 not found: ID does not exist" containerID="13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1"
Dec 03 18:28:09 crc kubenswrapper[5002]: I1203 18:28:09.422065 5002 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1"} err="failed to get container status \"13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1\": rpc error: code = NotFound desc = could not find container \"13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1\": container with ID starting with 13af5e3c25fe3927b209a4b8b03550371f298ea8769304700b397761c06e82f1 not found: ID does not exist"
Dec 03 18:28:10 crc kubenswrapper[5002]: I1203 18:28:10.856507 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" path="/var/lib/kubelet/pods/315fe50e-953c-47b3-ba6d-d2a1aef6133a/volumes"
Dec 03 18:28:23 crc kubenswrapper[5002]: I1203 18:28:23.841402 5002 scope.go:117] "RemoveContainer" containerID="f274776554920c013a14fd07c02ac9f240cdcb8997dbced6ca7b401ada1b1f2a"
Dec 03 18:28:24 crc kubenswrapper[5002]: I1203 18:28:24.514578 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" event={"ID":"c1d64ada-fbf9-4b0e-abb6-9b29bfec7309","Type":"ContainerStarted","Data":"1f5bcfe3c094c07e77930e50fa5b20aab9ac73f996f8e7f12e85e5ac8de1ae47"}
Dec 03 18:28:50 crc kubenswrapper[5002]: I1203 18:28:50.089243 5002 scope.go:117] "RemoveContainer" containerID="a9687978fcadcf007983ae67e7915a89e0d2295b48c12657a459dfc9e3aadef8"
Dec 03 18:29:56 crc kubenswrapper[5002]: I1203 18:29:56.043305 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-rbf5d"]
Dec 03 18:29:56 crc kubenswrapper[5002]: I1203 18:29:56.055395 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-6a1c-account-create-update-5dx2j"]
Dec 03 18:29:56 crc kubenswrapper[5002]: I1203 18:29:56.063015 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-6a1c-account-create-update-5dx2j"]
Dec 03 18:29:56 crc kubenswrapper[5002]: I1203 18:29:56.092505 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-rbf5d"]
Dec 03 18:29:56 crc kubenswrapper[5002]: I1203 18:29:56.859337 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6acc2704-72e1-4467-932f-8cb49c2eb422" path="/var/lib/kubelet/pods/6acc2704-72e1-4467-932f-8cb49c2eb422/volumes"
Dec 03 18:29:56 crc kubenswrapper[5002]: I1203 18:29:56.860965 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74bd124f-de70-4274-86be-640d56813b9f" path="/var/lib/kubelet/pods/74bd124f-de70-4274-86be-640d56813b9f/volumes"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.149442 5002 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"]
Dec 03 18:30:00 crc kubenswrapper[5002]: E1203 18:30:00.150500 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="registry-server"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.150515 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="registry-server"
Dec 03 18:30:00 crc kubenswrapper[5002]: E1203 18:30:00.150541 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="extract-content"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.150547 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="extract-content"
Dec 03 18:30:00 crc kubenswrapper[5002]: E1203 18:30:00.150561 5002 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="extract-utilities"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.150567 5002 state_mem.go:107] "Deleted CPUSet assignment" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="extract-utilities"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.150814 5002 memory_manager.go:354] "RemoveStaleState removing state" podUID="315fe50e-953c-47b3-ba6d-d2a1aef6133a" containerName="registry-server"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.151617 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.155685 5002 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.156106 5002 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.175133 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"]
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.184263 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd3168df-7889-4352-bcd8-d3895972d38d-secret-volume\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.184346 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd3168df-7889-4352-bcd8-d3895972d38d-config-volume\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.184486 5002 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4r4w\" (UniqueName: \"kubernetes.io/projected/bd3168df-7889-4352-bcd8-d3895972d38d-kube-api-access-k4r4w\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.285838 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd3168df-7889-4352-bcd8-d3895972d38d-secret-volume\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.286132 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd3168df-7889-4352-bcd8-d3895972d38d-config-volume\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.286288 5002 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4r4w\" (UniqueName: \"kubernetes.io/projected/bd3168df-7889-4352-bcd8-d3895972d38d-kube-api-access-k4r4w\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.287431 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd3168df-7889-4352-bcd8-d3895972d38d-config-volume\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.292436 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd3168df-7889-4352-bcd8-d3895972d38d-secret-volume\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.306112 5002 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4r4w\" (UniqueName: \"kubernetes.io/projected/bd3168df-7889-4352-bcd8-d3895972d38d-kube-api-access-k4r4w\") pod \"collect-profiles-29413110-8sbgv\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.478798 5002 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:00 crc kubenswrapper[5002]: I1203 18:30:00.934398 5002 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"]
Dec 03 18:30:00 crc kubenswrapper[5002]: W1203 18:30:00.938861 5002 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd3168df_7889_4352_bcd8_d3895972d38d.slice/crio-367676ac50b7c2cff92c692e1aaa4d7d54539b6a93bcf357f45862b99b49e832 WatchSource:0}: Error finding container 367676ac50b7c2cff92c692e1aaa4d7d54539b6a93bcf357f45862b99b49e832: Status 404 returned error can't find the container with id 367676ac50b7c2cff92c692e1aaa4d7d54539b6a93bcf357f45862b99b49e832
Dec 03 18:30:01 crc kubenswrapper[5002]: I1203 18:30:01.703721 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv" event={"ID":"bd3168df-7889-4352-bcd8-d3895972d38d","Type":"ContainerStarted","Data":"eefa40edfa77ab493b64729e58b6e39b4431d8aa3213fc8619a89c60619ef66d"}
Dec 03 18:30:01 crc kubenswrapper[5002]: I1203 18:30:01.704107 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv" event={"ID":"bd3168df-7889-4352-bcd8-d3895972d38d","Type":"ContainerStarted","Data":"367676ac50b7c2cff92c692e1aaa4d7d54539b6a93bcf357f45862b99b49e832"}
Dec 03 18:30:02 crc kubenswrapper[5002]: I1203 18:30:02.713375 5002 generic.go:334] "Generic (PLEG): container finished" podID="bd3168df-7889-4352-bcd8-d3895972d38d" containerID="eefa40edfa77ab493b64729e58b6e39b4431d8aa3213fc8619a89c60619ef66d" exitCode=0
Dec 03 18:30:02 crc kubenswrapper[5002]: I1203 18:30:02.713442 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv" event={"ID":"bd3168df-7889-4352-bcd8-d3895972d38d","Type":"ContainerDied","Data":"eefa40edfa77ab493b64729e58b6e39b4431d8aa3213fc8619a89c60619ef66d"}
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.050198 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.178871 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd3168df-7889-4352-bcd8-d3895972d38d-secret-volume\") pod \"bd3168df-7889-4352-bcd8-d3895972d38d\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") "
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.179137 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4r4w\" (UniqueName: \"kubernetes.io/projected/bd3168df-7889-4352-bcd8-d3895972d38d-kube-api-access-k4r4w\") pod \"bd3168df-7889-4352-bcd8-d3895972d38d\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") "
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.179166 5002 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd3168df-7889-4352-bcd8-d3895972d38d-config-volume\") pod \"bd3168df-7889-4352-bcd8-d3895972d38d\" (UID: \"bd3168df-7889-4352-bcd8-d3895972d38d\") "
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.180346 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd3168df-7889-4352-bcd8-d3895972d38d-config-volume" (OuterVolumeSpecName: "config-volume") pod "bd3168df-7889-4352-bcd8-d3895972d38d" (UID: "bd3168df-7889-4352-bcd8-d3895972d38d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.185687 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd3168df-7889-4352-bcd8-d3895972d38d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bd3168df-7889-4352-bcd8-d3895972d38d" (UID: "bd3168df-7889-4352-bcd8-d3895972d38d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.186179 5002 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd3168df-7889-4352-bcd8-d3895972d38d-kube-api-access-k4r4w" (OuterVolumeSpecName: "kube-api-access-k4r4w") pod "bd3168df-7889-4352-bcd8-d3895972d38d" (UID: "bd3168df-7889-4352-bcd8-d3895972d38d"). InnerVolumeSpecName "kube-api-access-k4r4w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.281579 5002 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4r4w\" (UniqueName: \"kubernetes.io/projected/bd3168df-7889-4352-bcd8-d3895972d38d-kube-api-access-k4r4w\") on node \"crc\" DevicePath \"\""
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.281966 5002 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd3168df-7889-4352-bcd8-d3895972d38d-config-volume\") on node \"crc\" DevicePath \"\""
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.281980 5002 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd3168df-7889-4352-bcd8-d3895972d38d-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.737431 5002 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv" event={"ID":"bd3168df-7889-4352-bcd8-d3895972d38d","Type":"ContainerDied","Data":"367676ac50b7c2cff92c692e1aaa4d7d54539b6a93bcf357f45862b99b49e832"}
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.737481 5002 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="367676ac50b7c2cff92c692e1aaa4d7d54539b6a93bcf357f45862b99b49e832"
Dec 03 18:30:04 crc kubenswrapper[5002]: I1203 18:30:04.737544 5002 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413110-8sbgv"
Dec 03 18:30:05 crc kubenswrapper[5002]: I1203 18:30:05.578590 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6"]
Dec 03 18:30:05 crc kubenswrapper[5002]: I1203 18:30:05.590687 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413065-55km6"]
Dec 03 18:30:06 crc kubenswrapper[5002]: I1203 18:30:06.032886 5002 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-4ndlh"]
Dec 03 18:30:06 crc kubenswrapper[5002]: I1203 18:30:06.044828 5002 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-4ndlh"]
Dec 03 18:30:06 crc kubenswrapper[5002]: I1203 18:30:06.854610 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a5ec183-be05-4b60-8c77-18d67398bc24" path="/var/lib/kubelet/pods/4a5ec183-be05-4b60-8c77-18d67398bc24/volumes"
Dec 03 18:30:06 crc kubenswrapper[5002]: I1203 18:30:06.857448 5002 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be1e8c59-f401-4ea7-aada-c65cb303729a" path="/var/lib/kubelet/pods/be1e8c59-f401-4ea7-aada-c65cb303729a/volumes"
Dec 03 18:30:50 crc kubenswrapper[5002]: I1203 18:30:50.207600 5002 scope.go:117] "RemoveContainer" containerID="d9dbe9eea4ececcb0db6edaf653d16331c309e51d7e2377db15889a711d36c5d"
Dec 03 18:30:50 crc kubenswrapper[5002]: I1203 18:30:50.246339 5002 scope.go:117] "RemoveContainer" containerID="d37f423569a59d69e405c6d9d76f046494a6a26c4ff1e1b7261afb8716ab39c9"
Dec 03 18:30:50 crc kubenswrapper[5002]: I1203 18:30:50.293398 5002 scope.go:117] "RemoveContainer" containerID="aa3a6248bb956a638899d6e4144dc1d07e694f900414fd37cf9ac8ae3cb05e93"
Dec 03 18:30:50 crc kubenswrapper[5002]: I1203 18:30:50.354437 5002 scope.go:117] "RemoveContainer"
containerID="c2c4ef341a19b1f5fd7c1a6c2ced9a6699f6b383ee6a8b4cb7e71dec25437775" Dec 03 18:30:50 crc kubenswrapper[5002]: I1203 18:30:50.916198 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:30:50 crc kubenswrapper[5002]: I1203 18:30:50.916251 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 18:31:20 crc kubenswrapper[5002]: I1203 18:31:20.916926 5002 patch_prober.go:28] interesting pod/machine-config-daemon-bzb7f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 18:31:20 crc kubenswrapper[5002]: I1203 18:31:20.917556 5002 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bzb7f" podUID="c1d64ada-fbf9-4b0e-abb6-9b29bfec7309" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114101011024426 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114101012017344 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114062366016511 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114062367015462 5ustar corecore